Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-09-22 08:12:00 +02:00)
⚙️ feat: Adjust Rate of Stream Progress (#3244)
* chore: bump data-provider and add MESSAGES CacheKey
* refactor: avoid saving messages while streaming, save partial text to cache instead
* fix(ci): processChunks
* chore: logging aborted request to debug
* feat: set stream rate for token processing
* chore: specify default stream rate
* fix(ci): Update AppService.js to use optional chaining for endpointLocals assignment
* refactor: abstract the error handler
* feat: streamRate for assistants; refactor: update default rate for token
* refactor: update error handling in assistants/errors.js
* refactor: update error handling in assistants/errors.js
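The stream-rate feature itself is not in the AppService.js hunks reproduced further below. As a rough, hypothetical sketch of the general idea only (a configurable delay between token writes with a fallback default), not LibreChat's actual implementation:

// Hypothetical illustration of a configurable stream rate: pause between
// token writes so the client-facing stream advances at a steady pace.
const DEFAULT_STREAM_RATE = 1; // ms between chunks; assumed default, not the real value

const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function streamTokens(tokens, onProgress, streamRate = DEFAULT_STREAM_RATE) {
  for (const token of tokens) {
    onProgress(token);       // e.g. write an SSE event to the response
    await sleep(streamRate); // throttle: a higher rate means a slower, steadier stream
  }
}

// Usage sketch: the rate would come from per-endpoint configuration.
// await streamTokens(['Hel', 'lo', '!'], (t) => res.write(`data: ${t}\n\n`), 10);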
This commit is contained in:
parent
1c282d1517
commit
5d40d0a37a
29 changed files with 661 additions and 309 deletions
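Only the AppService.js endpoint-locals hunks are reproduced below; the "save partial text to cache instead" refactor lives in other files of this commit. A minimal sketch of that pattern, with entirely hypothetical names (a plain in-memory Map standing in for the real MESSAGES cache):

// Hypothetical sketch: while streaming, append partial text to a cache keyed by
// message id instead of writing the message document to the database on every chunk.
const partialTextCache = new Map(); // stand-in for a real cache namespaced by a MESSAGES key

function savePartialText(messageId, chunk) {
  const existing = partialTextCache.get(messageId) ?? '';
  partialTextCache.set(messageId, existing + chunk);
}

async function finalizeMessage(messageId, saveMessageToDb) {
  // On completion (or abort), persist the accumulated text once and clear the cache entry.
  const text = partialTextCache.get(messageId) ?? '';
  await saveMessageToDb({ messageId, text });
  partialTextCache.delete(messageId);
  return text;
}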
AppService.js (excerpt):

@@ -67,17 +67,18 @@ const AppService = async (app) => {
   handleRateLimits(config?.rateLimits);
 
   const endpointLocals = {};
+  const endpoints = config?.endpoints;
 
-  if (config?.endpoints?.[EModelEndpoint.azureOpenAI]) {
+  if (endpoints?.[EModelEndpoint.azureOpenAI]) {
     endpointLocals[EModelEndpoint.azureOpenAI] = azureConfigSetup(config);
     checkAzureVariables();
   }
 
-  if (config?.endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) {
+  if (endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) {
     endpointLocals[EModelEndpoint.azureAssistants] = azureAssistantsDefaults();
   }
 
-  if (config?.endpoints?.[EModelEndpoint.azureAssistants]) {
+  if (endpoints?.[EModelEndpoint.azureAssistants]) {
     endpointLocals[EModelEndpoint.azureAssistants] = assistantsConfigSetup(
       config,
       EModelEndpoint.azureAssistants,
@@ -85,7 +86,7 @@ const AppService = async (app) => {
     );
   }
 
-  if (config?.endpoints?.[EModelEndpoint.assistants]) {
+  if (endpoints?.[EModelEndpoint.assistants]) {
     endpointLocals[EModelEndpoint.assistants] = assistantsConfigSetup(
       config,
       EModelEndpoint.assistants,
@@ -93,6 +94,19 @@ const AppService = async (app) => {
     );
   }
 
+  if (endpoints?.[EModelEndpoint.openAI]) {
+    endpointLocals[EModelEndpoint.openAI] = endpoints[EModelEndpoint.openAI];
+  }
+  if (endpoints?.[EModelEndpoint.google]) {
+    endpointLocals[EModelEndpoint.google] = endpoints[EModelEndpoint.google];
+  }
+  if (endpoints?.[EModelEndpoint.anthropic]) {
+    endpointLocals[EModelEndpoint.anthropic] = endpoints[EModelEndpoint.anthropic];
+  }
+  if (endpoints?.[EModelEndpoint.gptPlugins]) {
+    endpointLocals[EModelEndpoint.gptPlugins] = endpoints[EModelEndpoint.gptPlugins];
+  }
+
   app.locals = {
     ...defaultLocals,
     modelSpecs: config.modelSpecs,