mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-16 16:30:15 +01:00
* 🔧 fix: enhance client options handling in AgentClient and set default recursion limit
- Updated the recursion limit to default to 25 if not specified in agentsEConfig.
- Enhanced client options in AgentClient to include model parameters such as apiKey and anthropicApiUrl from agentModelParams.
- Updated requestOptions in the anthropic endpoint to use reverseProxyUrl as anthropicApiUrl.
* Enhance LLM configuration tests with edge case handling
* chore: add return type annotation for getCustomEndpointConfig function
* fix: update modelOptions handling to use optional chaining and default to empty object in multiple endpoint initializations
* chore: update @librechat/agents to version 2.4.42
* refactor: streamline agent endpoint configuration and enhance client options handling for title generations
- Introduced a new `getProviderConfig` function to centralize provider configuration logic.
- Updated `AgentClient` to utilize the new provider configuration, improving clarity and maintainability.
- Removed redundant code related to endpoint initialization and model parameter handling.
- Enhanced error logging for missing endpoint configurations.
* fix: add abort handling for image generation and editing in OpenAIImageTools
* ci: enhance getLLMConfig tests to verify fetchOptions and dispatcher properties
* fix: use optional chaining for endpointOption properties in getOptions
* fix: increase title generation timeout from 25s to 45s, pass `endpointOption` to `getOptions`
* fix: update file filtering logic in getToolFilesByIds to ensure text field is properly checked
* fix: add error handling for empty OCR results in uploadMistralOCR and uploadAzureMistralOCR
* fix: enhance error handling in file upload to include 'No OCR result' message
* chore: update error messages in uploadMistralOCR and uploadAzureMistralOCR
* fix: enhance filtering logic in getToolFilesByIds to include context checks for OCR resources to only include files directly attached to agent
---------
Co-authored-by: Matt Burnett <matt.burnett@shopify.com>
104 lines
2.8 KiB
JavaScript
104 lines
2.8 KiB
JavaScript
const { HttpsProxyAgent } = require('https-proxy-agent');
|
|
const { createHandleLLMNewToken } = require('@librechat/api');
|
|
const {
|
|
AuthType,
|
|
Constants,
|
|
EModelEndpoint,
|
|
bedrockInputParser,
|
|
bedrockOutputParser,
|
|
removeNullishValues,
|
|
} = require('librechat-data-provider');
|
|
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
|
|
|
|
/**
 * Builds the AWS Bedrock LLM configuration for an incoming request.
 *
 * Resolves credentials (user-provided key vs. server env vars, falling back to
 * the AWS SDK default chain when neither is set), determines the token stream
 * rate from endpoint config overrides, and runs the merged model parameters
 * through the Bedrock input/output parsers.
 *
 * @param {Object} params
 * @param {Object} params.req - Express request; reads `req.body.key`, `req.user.id`, `req.app.locals`.
 * @param {string} [params.overrideModel] - Model ID that takes precedence over the endpoint option's model.
 * @param {Object} [params.endpointOption] - Endpoint option with `model` and `model_parameters`.
 * @returns {Promise<{ llmConfig: BedrockClientOptions, configOptions: Object }>}
 * @throws {Error} When a user-provided key is expected but no credentials were found.
 */
const getOptions = async ({ req, overrideModel, endpointOption }) => {
  const {
    BEDROCK_AWS_SECRET_ACCESS_KEY,
    BEDROCK_AWS_ACCESS_KEY_ID,
    BEDROCK_AWS_SESSION_TOKEN,
    BEDROCK_REVERSE_PROXY,
    BEDROCK_AWS_DEFAULT_REGION,
    PROXY,
  } = process.env;

  const expiresAt = req.body.key;
  const isUserProvided = BEDROCK_AWS_SECRET_ACCESS_KEY === AuthType.USER_PROVIDED;

  let credentials;
  if (isUserProvided) {
    // Keys are supplied per-user; fetch the stored key for this user.
    credentials = await getUserKey({ userId: req.user.id, name: EModelEndpoint.bedrock });
  } else {
    credentials = {
      accessKeyId: BEDROCK_AWS_ACCESS_KEY_ID,
      secretAccessKey: BEDROCK_AWS_SECRET_ACCESS_KEY,
    };
    if (BEDROCK_AWS_SESSION_TOKEN) {
      credentials.sessionToken = BEDROCK_AWS_SESSION_TOKEN;
    }
  }

  if (!credentials) {
    throw new Error('Bedrock credentials not provided. Please provide them again.');
  }

  const accessKeyMissing =
    credentials.accessKeyId === undefined || credentials.accessKeyId === '';
  const secretKeyMissing =
    credentials.secretAccessKey === undefined || credentials.secretAccessKey === '';
  if (!isUserProvided && accessKeyMissing && secretKeyMissing) {
    // No explicit keys configured: let the AWS SDK resolve credentials itself.
    credentials = undefined;
  }

  if (expiresAt && isUserProvided) {
    checkUserKeyExpiry(expiresAt, EModelEndpoint.bedrock);
  }

  /** @type {undefined | TBaseEndpoint} */
  const bedrockConfig = req.app.locals[EModelEndpoint.bedrock];
  /** @type {undefined | TBaseEndpoint} */
  const allConfig = req.app.locals.all;
  /**
   * Stream rate precedence: the `all` endpoint config overrides the
   * bedrock-specific config, which overrides the library default.
   * @type {number}
   */
  const streamRate =
    allConfig?.streamRate || bedrockConfig?.streamRate || Constants.DEFAULT_STREAM_RATE;

  /** @type {BedrockClientOptions} */
  const requestOptions = {
    model: overrideModel ?? endpointOption?.model,
    region: BEDROCK_AWS_DEFAULT_REGION,
  };

  const configOptions = {};
  if (PROXY) {
    /** NOTE: NOT SUPPORTED BY BEDROCK */
    configOptions.httpAgent = new HttpsProxyAgent(PROXY);
  }

  // Merge base request options with per-request model parameters, strip
  // nullish values, then normalize through the Bedrock parser pipeline.
  const mergedParams = removeNullishValues({
    ...requestOptions,
    ...(endpointOption?.model_parameters ?? {}),
  });
  const llmConfig = bedrockOutputParser(bedrockInputParser.parse(mergedParams));

  if (credentials) {
    llmConfig.credentials = credentials;
  }
  if (BEDROCK_REVERSE_PROXY) {
    llmConfig.endpointHost = BEDROCK_REVERSE_PROXY;
  }

  llmConfig.callbacks = [{ handleLLMNewToken: createHandleLLMNewToken(streamRate) }];

  return {
    /** @type {BedrockClientOptions} */
    llmConfig,
    configOptions,
  };
};
|
|
|
|
// Options builder for the Bedrock endpoint; consumed during client initialization.
module.exports = getOptions;
|