Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-17 00:40:14 +01:00)
📦 chore: Bump Agents Packages (#7992)
* chore: update peer dependency for @librechat/agents to version 2.4.41
* 🔧 chore: proxy handling in OpenAI endpoint to use undici
* 🔧 chore: update @anthropic-ai/sdk to version 0.52.0 and refactor proxy handling to use undici
* 🔧 chore: update globIgnores in vite.config.ts to exclude index.html from caching
* 🔧 ci: update proxy handling in getLLMConfig to use fetchOptions and ProxyAgent
* 🔧 chore: refactor proxy handling in Anthropic and OpenAI clients to use fetchOptions
* refactor: agent initialization to streamline model parameters and resendFiles handling
* chore: update @google/generative-ai to version 0.24.0
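The recurring change across these commits is swapping `https-proxy-agent` (an `httpAgent` for the legacy HTTP stack) for undici's `ProxyAgent`, passed to fetch as a `dispatcher`. A minimal sketch of the pattern, with placeholder proxy and target URLs:

```js
// Minimal sketch of the undici proxy pattern used throughout this PR.
// The proxy and target URLs are placeholders.
const { ProxyAgent, fetch } = require('undici');

async function fetchThroughProxy() {
  const proxyAgent = new ProxyAgent('http://proxy:8080');
  // undici's fetch accepts a `dispatcher`; SDKs that expose `fetchOptions`
  // forward an object like { dispatcher } into their internal fetch calls.
  const res = await fetch('https://api.example.com/v1/models', {
    dispatcher: proxyAgent,
  });
  return res.json();
}
```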
This commit is contained in:
parent 97085073d2
commit fa54c9ae90

12 changed files with 598 additions and 262 deletions
```diff
@@ -190,10 +190,11 @@ class AnthropicClient extends BaseClient {
       reverseProxyUrl: this.options.reverseProxyUrl,
     }),
     apiKey: this.apiKey,
+    fetchOptions: {},
   };

   if (this.options.proxy) {
-    options.httpAgent = new HttpsProxyAgent(this.options.proxy);
+    options.fetchOptions.agent = new HttpsProxyAgent(this.options.proxy);
   }

   if (this.options.reverseProxyUrl) {
```
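Note that this client keeps `HttpsProxyAgent` but moves it under `fetchOptions.agent` rather than using undici's dispatcher. That shape only has meaning for a node-fetch-style fetch implementation, where proxying is configured via the `agent` request option; a hedged sketch under that assumption:

```js
// Sketch of the node-fetch-style `agent` option (an assumption here:
// `fetchOptions.agent` is honored by fetch implementations that accept
// an Agent, such as node-fetch v2, not by undici's built-in fetch).
const fetch = require('node-fetch');
const { HttpsProxyAgent } = require('https-proxy-agent');

async function fetchViaAgent() {
  const agent = new HttpsProxyAgent('http://proxy:8080'); // placeholder proxy
  const res = await fetch('https://api.anthropic.com/v1/models', { agent });
  return res.status;
}
```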
```diff
@@ -1159,6 +1159,7 @@ ${convo}
   logger.debug('[OpenAIClient] chatCompletion', { baseURL, modelOptions });
   const opts = {
     baseURL,
+    fetchOptions: {},
   };

   if (this.useOpenRouter) {
```
```diff
@@ -1177,7 +1178,7 @@ ${convo}
   }

   if (this.options.proxy) {
-    opts.httpAgent = new HttpsProxyAgent(this.options.proxy);
+    opts.fetchOptions.agent = new HttpsProxyAgent(this.options.proxy);
   }

   /** @type {TAzureConfig | undefined} */
```
```diff
@@ -1395,7 +1396,7 @@ ${convo}
     ...modelOptions,
     stream: true,
   };
-  const stream = await openai.beta.chat.completions
+  const stream = await openai.chat.completions
     .stream(params)
     .on('abort', () => {
      /* Do nothing here */
```
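The move off `openai.beta.chat.completions` tracks newer releases of the openai-node SDK, where the streaming helper is exposed directly on `chat.completions`. A hedged usage sketch (model name and prompt are placeholders, not taken from this PR):

```js
// Hedged sketch of the streaming helper after its promotion out of `beta`.
const OpenAI = require('openai');

async function streamChat() {
  const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
  const stream = openai.chat.completions
    .stream({
      model: 'gpt-4o-mini', // placeholder
      messages: [{ role: 'user', content: 'Say hello' }],
    })
    .on('content', (delta) => process.stdout.write(delta))
    .on('abort', () => {
      /* Do nothing here */
    });
  return await stream.finalChatCompletion();
}
```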
```diff
@@ -309,7 +309,7 @@ describe('AnthropicClient', () => {
     };
     client.setOptions({ modelOptions, promptCache: true });
     const anthropicClient = client.getClient(modelOptions);
-    expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta');
+    expect(anthropicClient._options.defaultHeaders).toBeUndefined();
   });

   it('should not add beta header for other models', () => {
```
```diff
@@ -320,7 +320,7 @@ describe('AnthropicClient', () => {
       },
     });
     const anthropicClient = client.getClient();
-    expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta');
+    expect(anthropicClient._options.defaultHeaders).toBeUndefined();
   });
 });
```
```diff
@@ -34,21 +34,21 @@
   },
   "homepage": "https://librechat.ai",
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.37.0",
+    "@anthropic-ai/sdk": "^0.52.0",
     "@aws-sdk/client-s3": "^3.758.0",
     "@aws-sdk/s3-request-presigner": "^3.758.0",
     "@azure/identity": "^4.7.0",
     "@azure/search-documents": "^12.0.0",
     "@azure/storage-blob": "^12.27.0",
-    "@google/generative-ai": "^0.23.0",
+    "@google/generative-ai": "^0.24.0",
     "@googleapis/youtube": "^20.0.0",
     "@keyv/redis": "^4.3.3",
-    "@langchain/community": "^0.3.44",
-    "@langchain/core": "^0.3.57",
-    "@langchain/google-genai": "^0.2.9",
-    "@langchain/google-vertexai": "^0.2.9",
+    "@langchain/community": "^0.3.47",
+    "@langchain/core": "^0.3.60",
+    "@langchain/google-genai": "^0.2.13",
+    "@langchain/google-vertexai": "^0.2.13",
     "@langchain/textsplitters": "^0.1.0",
-    "@librechat/agents": "^2.4.38",
+    "@librechat/agents": "^2.4.41",
     "@librechat/api": "*",
     "@librechat/data-schemas": "*",
     "@node-saml/passport-saml": "^5.0.0",
```
```diff
@@ -63,11 +63,17 @@ const initializeAgent = async ({
   }
   let currentFiles;

-  if (
-    isInitialAgent &&
-    conversationId != null &&
-    (agent.model_parameters?.resendFiles ?? true) === true
-  ) {
+  const _modelOptions = structuredClone(
+    Object.assign(
+      { model: agent.model },
+      agent.model_parameters ?? { model: agent.model },
+      isInitialAgent === true ? endpointOption?.model_parameters : {},
+    ),
+  );
+
+  const { resendFiles = true, ...modelOptions } = _modelOptions;
+
+  if (isInitialAgent && conversationId != null && resendFiles) {
     const fileIds = (await getConvoFiles(conversationId)) ?? [];
     /** @type {Set<EToolResources>} */
     const toolResourceSet = new Set();
```
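The merge order matters here: the agent's stored `model_parameters` are laid over a `{ model }` base, request-level `endpointOption.model_parameters` win for the initial agent only, and `structuredClone` detaches the result from its sources before `resendFiles` is destructured out. A small sketch of the semantics (values are illustrative only):

```js
// Illustrative values; demonstrates the precedence of the merge above.
const agent = {
  model: 'claude-3-5-sonnet', // placeholder
  model_parameters: { model: 'claude-3-5-sonnet', temperature: 0.2, resendFiles: false },
};
const endpointOption = { model_parameters: { temperature: 0.7 } };
const isInitialAgent = true;

const _modelOptions = structuredClone(
  Object.assign(
    { model: agent.model },
    agent.model_parameters ?? { model: agent.model },
    isInitialAgent === true ? endpointOption?.model_parameters : {},
  ),
);
const { resendFiles = true, ...modelOptions } = _modelOptions;
// resendFiles === false (kept from agent.model_parameters)
// modelOptions === { model: 'claude-3-5-sonnet', temperature: 0.7 }
```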
```diff
@@ -117,15 +123,11 @@ const initializeAgent = async ({
     getOptions = initCustom;
     agent.provider = Providers.OPENAI;
   }
-  const model_parameters = Object.assign(
-    {},
-    agent.model_parameters ?? { model: agent.model },
-    isInitialAgent === true ? endpointOption?.model_parameters : {},
-  );
   const _endpointOption =
     isInitialAgent === true
-      ? Object.assign({}, endpointOption, { model_parameters })
-      : { model_parameters };
+      ? Object.assign({}, endpointOption, { model_parameters: modelOptions })
+      : { model_parameters: modelOptions };

   const options = await getOptions({
     req,
```
```diff
@@ -136,6 +138,20 @@ const initializeAgent = async ({
     endpointOption: _endpointOption,
   });

+  const tokensModel =
+    agent.provider === EModelEndpoint.azureOpenAI ? agent.model : modelOptions.model;
+  const maxTokens = optionalChainWithEmptyCheck(
+    modelOptions.maxOutputTokens,
+    modelOptions.maxTokens,
+    0,
+  );
+  const maxContextTokens = optionalChainWithEmptyCheck(
+    modelOptions.maxContextTokens,
+    modelOptions.max_context_tokens,
+    getModelMaxTokens(tokensModel, providerEndpointMap[provider]),
+    4096,
+  );
+
   if (
     agent.endpoint === EModelEndpoint.azureOpenAI &&
     options.llmConfig?.azureOpenAIApiInstanceName == null
```
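`optionalChainWithEmptyCheck` is used here as a "first non-empty value" picker across differently named token fields. A hypothetical implementation consistent with that usage (the real helper lives elsewhere in the codebase and may differ in detail):

```js
// Hypothetical sketch of a "first non-empty" picker; the actual LibreChat
// helper may differ. Returns the first argument that is not undefined,
// null, or an empty string, falling back to the last argument.
function optionalChainWithEmptyCheck(...values) {
  for (const value of values) {
    if (value !== undefined && value !== null && value !== '') {
      return value;
    }
  }
  return values[values.length - 1];
}

// e.g. optionalChainWithEmptyCheck(undefined, '', 8192, 4096) === 8192
```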
```diff
@@ -148,15 +164,11 @@ const initializeAgent = async ({
   }

   /** @type {import('@librechat/agents').ClientOptions} */
-  agent.model_parameters = Object.assign(model_parameters, options.llmConfig);
+  agent.model_parameters = { ...options.llmConfig };
   if (options.configOptions) {
     agent.model_parameters.configuration = options.configOptions;
   }

-  if (!agent.model_parameters.model) {
-    agent.model_parameters.model = agent.model;
-  }
-
   if (agent.instructions && agent.instructions !== '') {
     agent.instructions = replaceSpecialVars({
       text: agent.instructions,
```
```diff
@@ -171,23 +183,11 @@ const initializeAgent = async ({
     });
   }

-  const tokensModel =
-    agent.provider === EModelEndpoint.azureOpenAI ? agent.model : agent.model_parameters.model;
-  const maxTokens = optionalChainWithEmptyCheck(
-    agent.model_parameters.maxOutputTokens,
-    agent.model_parameters.maxTokens,
-    0,
-  );
-  const maxContextTokens = optionalChainWithEmptyCheck(
-    agent.model_parameters.maxContextTokens,
-    agent.max_context_tokens,
-    getModelMaxTokens(tokensModel, providerEndpointMap[provider]),
-    4096,
-  );
   return {
     ...agent,
     tools,
     attachments,
+    resendFiles,
     toolContextMap,
     maxContextTokens: (maxContextTokens - maxTokens) * 0.9,
   };
```
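The returned budget reserves the output allowance plus a 10% safety margin: with the fallback `maxContextTokens` of 4096 and `maxTokens` of 0, the effective window is `(4096 - 0) * 0.9 = 3686.4` tokens; a model advertising a 200,000-token context with 8,192 output tokens would get `(200000 - 8192) * 0.9 = 172627.2`.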
```diff
@@ -130,8 +130,8 @@ const initializeClient = async ({ req, res, endpointOption }) => {
     iconURL: endpointOption.iconURL,
     attachments: primaryConfig.attachments,
     endpointType: endpointOption.endpointType,
+    resendFiles: primaryConfig.resendFiles ?? true,
     maxContextTokens: primaryConfig.maxContextTokens,
-    resendFiles: primaryConfig.model_parameters?.resendFiles ?? true,
     endpoint:
       primaryConfig.id === Constants.EPHEMERAL_AGENT_ID
         ? primaryConfig.endpoint
```
```diff
@@ -1,4 +1,4 @@
-const { HttpsProxyAgent } = require('https-proxy-agent');
+const { ProxyAgent } = require('undici');
 const { anthropicSettings, removeNullishValues } = require('librechat-data-provider');
 const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = require('./helpers');
```
```diff
@@ -67,7 +67,10 @@ function getLLMConfig(apiKey, options = {}) {
   }

   if (options.proxy) {
-    requestOptions.clientOptions.httpAgent = new HttpsProxyAgent(options.proxy);
+    const proxyAgent = new ProxyAgent(options.proxy);
+    requestOptions.clientOptions.fetchOptions = {
+      dispatcher: proxyAgent,
+    };
   }

   if (options.reverseProxyUrl) {
```
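Downstream, the `clientOptions` produced here end up on the Anthropic client, and the 0.52-era SDK forwards `fetchOptions` into its fetch calls, so the `dispatcher` routes requests through the proxy. A hedged sketch of the resulting client construction:

```js
// Hedged sketch: an Anthropic client with the clientOptions shape built
// above. Assumes @anthropic-ai/sdk >= 0.52, which uses the built-in fetch
// and forwards `fetchOptions`; the proxy URL is a placeholder.
const Anthropic = require('@anthropic-ai/sdk');
const { ProxyAgent } = require('undici');

const client = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
  fetchOptions: {
    dispatcher: new ProxyAgent('http://proxy:8080'),
  },
});
```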
```diff
@@ -21,8 +21,12 @@ describe('getLLMConfig', () => {
       proxy: 'http://proxy:8080',
     });

-    expect(result.llmConfig.clientOptions).toHaveProperty('httpAgent');
-    expect(result.llmConfig.clientOptions.httpAgent).toHaveProperty('proxy', 'http://proxy:8080');
+    expect(result.llmConfig.clientOptions).toHaveProperty('fetchOptions');
+    expect(result.llmConfig.clientOptions.fetchOptions).toHaveProperty('dispatcher');
+    expect(result.llmConfig.clientOptions.fetchOptions.dispatcher).toBeDefined();
+    expect(result.llmConfig.clientOptions.fetchOptions.dispatcher.constructor.name).toBe(
+      'ProxyAgent',
+    );
   });

   it('should include reverse proxy URL when provided', () => {
```
```diff
@@ -46,7 +46,7 @@ export default defineConfig(({ command }) => ({
         'assets/maskable-icon.png',
         'manifest.webmanifest',
       ],
-      globIgnores: ['images/**/*', '**/*.map'],
+      globIgnores: ['images/**/*', '**/*.map', 'index.html'],
       maximumFileSizeToCacheInBytes: 4 * 1024 * 1024,
       navigateFallbackDenylist: [/^\/oauth/, /^\/api/],
     },
```
package-lock.json (generated): 744 lines changed. File diff suppressed because it is too large.
```diff
@@ -69,7 +69,7 @@
     "registry": "https://registry.npmjs.org/"
   },
   "peerDependencies": {
-    "@librechat/agents": "^2.4.37",
+    "@librechat/agents": "^2.4.41",
     "@librechat/data-schemas": "*",
     "@modelcontextprotocol/sdk": "^1.12.3",
     "axios": "^1.8.2",
```
```diff
@@ -80,6 +80,7 @@
     "librechat-data-provider": "*",
     "node-fetch": "2.7.0",
     "tiktoken": "^1.0.15",
+    "undici": "^7.10.0",
     "zod": "^3.22.4"
   }
 }
```
```diff
@@ -1,4 +1,4 @@
-import { HttpsProxyAgent } from 'https-proxy-agent';
+import { ProxyAgent } from 'undici';
 import { KnownEndpoints } from 'librechat-data-provider';
 import type * as t from '~/types';
 import { sanitizeModelName, constructAzureURL } from '~/utils/azure';
```
```diff
@@ -102,8 +102,10 @@ export function getOpenAIConfig(
   }

   if (proxy) {
-    const proxyAgent = new HttpsProxyAgent(proxy);
-    configOptions.httpAgent = proxyAgent;
+    const proxyAgent = new ProxyAgent(proxy);
+    configOptions.fetchOptions = {
+      dispatcher: proxyAgent,
+    };
   }

   if (azure) {
```
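The OpenAI side uses the same shape. A hedged sketch of how the resulting `configOptions` would reach the openai-node client (recent SDK versions forward `fetchOptions` into the built-in fetch):

```js
// Hedged sketch: openai-node (v5-era) accepts `fetchOptions`, merged into
// its fetch calls; with Node's undici-based fetch, the `dispatcher`
// routes requests through the proxy. Proxy URL is a placeholder.
const OpenAI = require('openai');
const { ProxyAgent } = require('undici');

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  fetchOptions: {
    dispatcher: new ProxyAgent('http://proxy:8080'),
  },
});
```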