Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-09-22 08:12:00 +02:00

* feat: Refactor ModelEndHandler to collect usage metadata only if it exists
* feat: google tool end handling, custom anthropic class for better token ux
* refactor: differentiate between client <> request options
* feat: initial support for google agents
* feat: only cache messages with non-empty text
* feat: Cache non-empty messages in chatV2 controller
* fix: anthropic llm client options llmConfig
* refactor: streamline client options handling in LLM configuration
* fix: VertexAI Agent Auth & Tool Handling
* fix: additional fields for llmConfig, however customHeaders are not supported by langchain, requires PR
* feat: set default location for vertexai LLM configuration
* fix: outdated OpenAI Client options for getLLMConfig
* chore: agent provider options typing
* chore: add note about currently unsupported customHeaders in langchain GenAI client
* fix: skip transaction creation when rawAmount is NaN
102 lines
2.7 KiB
JavaScript
const { HttpsProxyAgent } = require('https-proxy-agent');
const {
  EModelEndpoint,
  Constants,
  AuthType,
  removeNullishValues,
} = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { sleep } = require('~/server/utils');

/**
 * Builds the AWS Bedrock client options for a request: resolves credentials
 * (user-provided key or environment variables), applies the configured stream
 * rate, and wires in optional reverse proxy and HTTP proxy settings.
 */
const getOptions = async ({ req, endpointOption }) => {
  const {
    BEDROCK_AWS_SECRET_ACCESS_KEY,
    BEDROCK_AWS_ACCESS_KEY_ID,
    BEDROCK_AWS_SESSION_TOKEN,
    BEDROCK_REVERSE_PROXY,
    BEDROCK_AWS_DEFAULT_REGION,
    PROXY,
  } = process.env;

  const expiresAt = req.body.key;
  const isUserProvided = BEDROCK_AWS_SECRET_ACCESS_KEY === AuthType.USER_PROVIDED;

  // Use the user's stored key when the endpoint is user-provided; otherwise
  // fall back to credentials from the environment.
  let credentials = isUserProvided
    ? await getUserKey({ userId: req.user.id, name: EModelEndpoint.bedrock })
    : {
        accessKeyId: BEDROCK_AWS_ACCESS_KEY_ID,
        secretAccessKey: BEDROCK_AWS_SECRET_ACCESS_KEY,
        ...(BEDROCK_AWS_SESSION_TOKEN && { sessionToken: BEDROCK_AWS_SESSION_TOKEN }),
      };

  if (!credentials) {
    throw new Error('Bedrock credentials not provided. Please provide them again.');
  }

  // Treat empty environment credentials as absent so the AWS SDK can fall
  // back to its default credential chain.
  if (
    !isUserProvided &&
    (credentials.accessKeyId === undefined || credentials.accessKeyId === '') &&
    (credentials.secretAccessKey === undefined || credentials.secretAccessKey === '')
  ) {
    credentials = undefined;
  }

  if (expiresAt && isUserProvided) {
    checkUserKeyExpiry(expiresAt, EModelEndpoint.bedrock);
  }

  // Resolve the stream rate: the Bedrock endpoint config overrides the
  // default, and the global `all` config overrides both.
  /** @type {number} */
  let streamRate = Constants.DEFAULT_STREAM_RATE;

  /** @type {undefined | TBaseEndpoint} */
  const bedrockConfig = req.app.locals[EModelEndpoint.bedrock];

  if (bedrockConfig && bedrockConfig.streamRate) {
    streamRate = bedrockConfig.streamRate;
  }

  /** @type {undefined | TBaseEndpoint} */
  const allConfig = req.app.locals.all;
  if (allConfig && allConfig.streamRate) {
    streamRate = allConfig.streamRate;
  }

  /** @type {BedrockClientOptions} */
  const requestOptions = {
    model: endpointOption.model,
    region: BEDROCK_AWS_DEFAULT_REGION,
    streaming: true,
    streamUsage: true,
    callbacks: [
      {
        // Throttle streaming by sleeping between tokens at the configured rate.
        handleLLMNewToken: async () => {
          if (!streamRate) {
            return;
          }
          await sleep(streamRate);
        },
      },
    ],
  };

  if (credentials) {
    requestOptions.credentials = credentials;
  }

  if (BEDROCK_REVERSE_PROXY) {
    requestOptions.endpointHost = BEDROCK_REVERSE_PROXY;
  }

  const configOptions = {};
  if (PROXY) {
    /** NOTE: NOT SUPPORTED BY BEDROCK */
    configOptions.httpAgent = new HttpsProxyAgent(PROXY);
  }

  return {
    /** @type {BedrockClientOptions} */
    llmConfig: removeNullishValues(Object.assign(requestOptions, endpointOption.model_parameters)),
    configOptions,
  };
};

module.exports = getOptions;
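
Usage note: the sketch below illustrates how getOptions might be consumed. It is not part of the file above; the require path, the endpointOption values, and the ChatBedrockConverse consumer from @langchain/aws are assumptions for illustration.

// Hypothetical consumer of getOptions (require path './options' is assumed).
const { ChatBedrockConverse } = require('@langchain/aws');
const getOptions = require('./options');

async function createBedrockClient(req) {
  // Example endpointOption; real values are supplied by the caller.
  const endpointOption = {
    model: 'anthropic.claude-3-5-sonnet-20240620-v1:0',
    model_parameters: { temperature: 0.7 },
  };

  const { llmConfig, configOptions } = await getOptions({ req, endpointOption });

  // llmConfig carries model, region, credentials, and streaming settings;
  // configOptions may carry an HTTP proxy agent, though the file above notes
  // that proxying is not supported by Bedrock.
  return new ChatBedrockConverse(llmConfig);
}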