⚙️ chore: Resolve Build Warning, Package Cleanup, Robust Temp Chat Time (#9962)
Some checks are pending
Docker Dev Branch Images Build / build (Dockerfile, lc-dev, node) (push) Waiting to run
Docker Dev Branch Images Build / build (Dockerfile.multi, lc-dev-api, api-build) (push) Waiting to run

* ⚙️ chore: Resolve Build Warning and `keyvMongo` types

* 🔄 chore: Update mongodb version to ^6.14.2 in package.json and package-lock.json

* chore: remove @langchain/openai dep

* 🔄 refactor: Change log level from warn to debug for missing endpoint config

* 🔄 refactor: Improve temp chat expiration date calculation in tests and implementation
This commit is contained in:
Danny Avila 2025-10-04 01:53:37 -04:00 committed by GitHub
parent c0ed738aed
commit 1b8a0bfaee
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 131 additions and 254 deletions

View file

@@ -43,7 +43,6 @@ const { runTitleChain } = require('./chains');
const { extractBaseURL } = require('~/utils'); const { extractBaseURL } = require('~/utils');
const { tokenSplit } = require('./document'); const { tokenSplit } = require('./document');
const BaseClient = require('./BaseClient'); const BaseClient = require('./BaseClient');
const { createLLM } = require('./llm');
class OpenAIClient extends BaseClient { class OpenAIClient extends BaseClient {
constructor(apiKey, options = {}) { constructor(apiKey, options = {}) {
@@ -614,65 +613,8 @@ class OpenAIClient extends BaseClient {
return (reply ?? '').trim(); return (reply ?? '').trim();
} }
initializeLLM({ initializeLLM() {
model = openAISettings.model.default, throw new Error('Deprecated');
modelName,
temperature = 0.2,
max_tokens,
streaming,
}) {
const modelOptions = {
modelName: modelName ?? model,
temperature,
user: this.user,
};
if (max_tokens) {
modelOptions.max_tokens = max_tokens;
}
const configOptions = {};
if (this.langchainProxy) {
configOptions.basePath = this.langchainProxy;
}
if (this.useOpenRouter) {
configOptions.basePath = 'https://openrouter.ai/api/v1';
configOptions.baseOptions = {
headers: {
'HTTP-Referer': 'https://librechat.ai',
'X-Title': 'LibreChat',
},
};
}
const { headers } = this.options;
if (headers && typeof headers === 'object' && !Array.isArray(headers)) {
configOptions.baseOptions = {
headers: resolveHeaders({
headers: {
...headers,
...configOptions?.baseOptions?.headers,
},
}),
};
}
if (this.options.proxy) {
configOptions.httpAgent = new HttpsProxyAgent(this.options.proxy);
configOptions.httpsAgent = new HttpsProxyAgent(this.options.proxy);
}
const llm = createLLM({
modelOptions,
configOptions,
openAIApiKey: this.apiKey,
azure: this.azure,
streaming,
});
return llm;
} }
/** /**

View file

@@ -1,81 +0,0 @@
const { ChatOpenAI } = require('@langchain/openai');
const { isEnabled, sanitizeModelName, constructAzureURL } = require('@librechat/api');
/**
* Creates a new instance of a language model (LLM) for chat interactions.
*
* @param {Object} options - The options for creating the LLM.
* @param {ModelOptions} options.modelOptions - The options specific to the model, including modelName, temperature, presence_penalty, frequency_penalty, and other model-related settings.
* @param {ConfigOptions} options.configOptions - Configuration options for the API requests, including proxy settings and custom headers.
* @param {Callbacks} [options.callbacks] - Callback functions for managing the lifecycle of the LLM, including token buffers, context, and initial message count.
* @param {boolean} [options.streaming=false] - Determines if the LLM should operate in streaming mode.
* @param {string} options.openAIApiKey - The API key for OpenAI, used for authentication.
* @param {AzureOptions} [options.azure={}] - Optional Azure-specific configurations. If provided, Azure configurations take precedence over OpenAI configurations.
*
* @returns {ChatOpenAI} An instance of the ChatOpenAI class, configured with the provided options.
*
* @example
* const llm = createLLM({
* modelOptions: { modelName: 'gpt-4o-mini', temperature: 0.2 },
* configOptions: { basePath: 'https://example.api/path' },
* callbacks: { onMessage: handleMessage },
* openAIApiKey: 'your-api-key'
* });
*/
function createLLM({
modelOptions,
configOptions,
callbacks,
streaming = false,
openAIApiKey,
azure = {},
}) {
let credentials = { openAIApiKey };
let configuration = {
apiKey: openAIApiKey,
...(configOptions.basePath && { baseURL: configOptions.basePath }),
};
/** @type {AzureOptions} */
let azureOptions = {};
if (azure) {
const useModelName = isEnabled(process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME);
credentials = {};
configuration = {};
azureOptions = azure;
azureOptions.azureOpenAIApiDeploymentName = useModelName
? sanitizeModelName(modelOptions.modelName)
: azureOptions.azureOpenAIApiDeploymentName;
}
if (azure && process.env.AZURE_OPENAI_DEFAULT_MODEL) {
modelOptions.modelName = process.env.AZURE_OPENAI_DEFAULT_MODEL;
}
if (azure && configOptions.basePath) {
const azureURL = constructAzureURL({
baseURL: configOptions.basePath,
azureOptions,
});
azureOptions.azureOpenAIBasePath = azureURL.split(
`/${azureOptions.azureOpenAIApiDeploymentName}`,
)[0];
}
return new ChatOpenAI(
{
streaming,
credentials,
configuration,
...azureOptions,
...modelOptions,
...credentials,
callbacks,
},
configOptions,
);
}
module.exports = createLLM;

View file

@@ -1,7 +1,5 @@
const createLLM = require('./createLLM');
const createCoherePayload = require('./createCoherePayload'); const createCoherePayload = require('./createCoherePayload');
module.exports = { module.exports = {
createLLM,
createCoherePayload, createCoherePayload,
}; };

View file

@@ -1,31 +0,0 @@
require('dotenv').config();
const { ChatOpenAI } = require('@langchain/openai');
const { getBufferString, ConversationSummaryBufferMemory } = require('langchain/memory');
const chatPromptMemory = new ConversationSummaryBufferMemory({
llm: new ChatOpenAI({ modelName: 'gpt-4o-mini', temperature: 0 }),
maxTokenLimit: 10,
returnMessages: true,
});
(async () => {
await chatPromptMemory.saveContext({ input: 'hi my name\'s Danny' }, { output: 'whats up' });
await chatPromptMemory.saveContext({ input: 'not much you' }, { output: 'not much' });
await chatPromptMemory.saveContext(
{ input: 'are you excited for the olympics?' },
{ output: 'not really' },
);
// We can also utilize the predict_new_summary method directly.
const messages = await chatPromptMemory.chatHistory.getMessages();
console.log('MESSAGES\n\n');
console.log(JSON.stringify(messages));
const previous_summary = '';
const predictSummary = await chatPromptMemory.predictNewSummary(messages, previous_summary);
console.log('SUMMARY\n\n');
console.log(JSON.stringify(getBufferString([{ role: 'system', content: predictSummary }])));
// const { history } = await chatPromptMemory.loadMemoryVariables({});
// console.log('HISTORY\n\n');
// console.log(JSON.stringify(history));
})();

View file

@@ -30,7 +30,6 @@ jest.mock('~/server/services/Config', () => ({
}), }),
})); }));
const { BaseLLM } = require('@langchain/openai');
const { Calculator } = require('@langchain/community/tools/calculator'); const { Calculator } = require('@langchain/community/tools/calculator');
const { User } = require('~/db/models'); const { User } = require('~/db/models');
@@ -172,7 +171,6 @@ describe('Tool Handlers', () => {
beforeAll(async () => { beforeAll(async () => {
const toolMap = await loadTools({ const toolMap = await loadTools({
user: fakeUser._id, user: fakeUser._id,
model: BaseLLM,
tools: sampleTools, tools: sampleTools,
returnMap: true, returnMap: true,
useSpecs: true, useSpecs: true,
@@ -266,7 +264,6 @@ describe('Tool Handlers', () => {
it('returns an empty object when no tools are requested', async () => { it('returns an empty object when no tools are requested', async () => {
toolFunctions = await loadTools({ toolFunctions = await loadTools({
user: fakeUser._id, user: fakeUser._id,
model: BaseLLM,
returnMap: true, returnMap: true,
useSpecs: true, useSpecs: true,
}); });
@@ -276,7 +273,6 @@ describe('Tool Handlers', () => {
process.env.SD_WEBUI_URL = mockCredential; process.env.SD_WEBUI_URL = mockCredential;
toolFunctions = await loadTools({ toolFunctions = await loadTools({
user: fakeUser._id, user: fakeUser._id,
model: BaseLLM,
tools: ['stable-diffusion'], tools: ['stable-diffusion'],
functions: true, functions: true,
returnMap: true, returnMap: true,

View file

@@ -46,7 +46,6 @@
"@langchain/core": "^0.3.62", "@langchain/core": "^0.3.62",
"@langchain/google-genai": "^0.2.13", "@langchain/google-genai": "^0.2.13",
"@langchain/google-vertexai": "^0.2.13", "@langchain/google-vertexai": "^0.2.13",
"@langchain/openai": "^0.5.18",
"@langchain/textsplitters": "^0.1.0", "@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.4.82", "@librechat/agents": "^2.4.82",
"@librechat/api": "*", "@librechat/api": "*",

View file

@@ -1116,8 +1116,8 @@ class AgentClient extends BaseClient {
appConfig.endpoints?.[endpoint] ?? appConfig.endpoints?.[endpoint] ??
titleProviderConfig.customEndpointConfig; titleProviderConfig.customEndpointConfig;
if (!endpointConfig) { if (!endpointConfig) {
logger.warn( logger.debug(
'[api/server/controllers/agents/client.js #titleConvo] Error getting endpoint config', `[api/server/controllers/agents/client.js #titleConvo] No endpoint config for "${endpoint}"`,
); );
} }

162
package-lock.json generated
View file

@@ -62,7 +62,6 @@
"@langchain/core": "^0.3.62", "@langchain/core": "^0.3.62",
"@langchain/google-genai": "^0.2.13", "@langchain/google-genai": "^0.2.13",
"@langchain/google-vertexai": "^0.2.13", "@langchain/google-vertexai": "^0.2.13",
"@langchain/openai": "^0.5.18",
"@langchain/textsplitters": "^0.1.0", "@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.4.82", "@librechat/agents": "^2.4.82",
"@librechat/api": "*", "@librechat/api": "*",
@@ -2364,6 +2363,54 @@
"mkdirp": "bin/cmd.js" "mkdirp": "bin/cmd.js"
} }
}, },
"api/node_modules/mongodb": {
"version": "6.20.0",
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.20.0.tgz",
"integrity": "sha512-Tl6MEIU3K4Rq3TSHd+sZQqRBoGlFsOgNrH5ltAcFBV62Re3Fd+FcaVf8uSEQFOJ51SDowDVttBTONMfoYWrWlQ==",
"license": "Apache-2.0",
"optional": true,
"peer": true,
"dependencies": {
"@mongodb-js/saslprep": "^1.3.0",
"bson": "^6.10.4",
"mongodb-connection-string-url": "^3.0.2"
},
"engines": {
"node": ">=16.20.1"
},
"peerDependencies": {
"@aws-sdk/credential-providers": "^3.188.0",
"@mongodb-js/zstd": "^1.1.0 || ^2.0.0",
"gcp-metadata": "^5.2.0",
"kerberos": "^2.0.1",
"mongodb-client-encryption": ">=6.0.0 <7",
"snappy": "^7.3.2",
"socks": "^2.7.1"
},
"peerDependenciesMeta": {
"@aws-sdk/credential-providers": {
"optional": true
},
"@mongodb-js/zstd": {
"optional": true
},
"gcp-metadata": {
"optional": true
},
"kerberos": {
"optional": true
},
"mongodb-client-encryption": {
"optional": true
},
"snappy": {
"optional": true
},
"socks": {
"optional": true
}
}
},
"api/node_modules/multer": { "api/node_modules/multer": {
"version": "2.0.2", "version": "2.0.2",
"resolved": "https://registry.npmjs.org/multer/-/multer-2.0.2.tgz", "resolved": "https://registry.npmjs.org/multer/-/multer-2.0.2.tgz",
@@ -21975,6 +22022,54 @@
"node": ">= 14" "node": ">= 14"
} }
}, },
"node_modules/@librechat/agents/node_modules/mongodb": {
"version": "6.20.0",
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.20.0.tgz",
"integrity": "sha512-Tl6MEIU3K4Rq3TSHd+sZQqRBoGlFsOgNrH5ltAcFBV62Re3Fd+FcaVf8uSEQFOJ51SDowDVttBTONMfoYWrWlQ==",
"license": "Apache-2.0",
"optional": true,
"peer": true,
"dependencies": {
"@mongodb-js/saslprep": "^1.3.0",
"bson": "^6.10.4",
"mongodb-connection-string-url": "^3.0.2"
},
"engines": {
"node": ">=16.20.1"
},
"peerDependencies": {
"@aws-sdk/credential-providers": "^3.188.0",
"@mongodb-js/zstd": "^1.1.0 || ^2.0.0",
"gcp-metadata": "^5.2.0",
"kerberos": "^2.0.1",
"mongodb-client-encryption": ">=6.0.0 <7",
"snappy": "^7.3.2",
"socks": "^2.7.1"
},
"peerDependenciesMeta": {
"@aws-sdk/credential-providers": {
"optional": true
},
"@mongodb-js/zstd": {
"optional": true
},
"gcp-metadata": {
"optional": true
},
"kerberos": {
"optional": true
},
"mongodb-client-encryption": {
"optional": true
},
"snappy": {
"optional": true
},
"socks": {
"optional": true
}
}
},
"node_modules/@librechat/agents/node_modules/openai": { "node_modules/@librechat/agents/node_modules/openai": {
"version": "5.11.0", "version": "5.11.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-5.11.0.tgz", "resolved": "https://registry.npmjs.org/openai/-/openai-5.11.0.tgz",
@@ -22623,9 +22718,10 @@
} }
}, },
"node_modules/@mongodb-js/saslprep": { "node_modules/@mongodb-js/saslprep": {
"version": "1.1.9", "version": "1.3.1",
"resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.1.9.tgz", "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.3.1.tgz",
"integrity": "sha512-tVkljjeEaAhCqTzajSdgbQ6gE6f3oneVwa3iXR6csiEwXXOFsiC6Uh9iAjAhXPtqa/XMDHWjjeNH/77m/Yq2dw==", "integrity": "sha512-6nZrq5kfAz0POWyhljnbWQQJQ5uT8oE2ddX303q1uY0tWsivWKgBDXBBvuFPwOqRRalXJuVO9EjOdVtuhLX0zg==",
"license": "MIT",
"dependencies": { "dependencies": {
"sparse-bitfield": "^3.0.3" "sparse-bitfield": "^3.0.3"
} }
@@ -41347,14 +41443,13 @@
} }
}, },
"node_modules/mongodb": { "node_modules/mongodb": {
"version": "6.17.0", "version": "6.14.2",
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.17.0.tgz", "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.14.2.tgz",
"integrity": "sha512-neerUzg/8U26cgruLysKEjJvoNSXhyID3RvzvdcpsIi2COYM3FS3o9nlH7fxFtefTb942dX3W9i37oPfCVj4wA==", "integrity": "sha512-kMEHNo0F3P6QKDq17zcDuPeaywK/YaJVCEQRzPF3TOM/Bl9MFg64YE5Tu7ifj37qZJMhwU1tl2Ioivws5gRG5Q==",
"devOptional": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"@mongodb-js/saslprep": "^1.1.9", "@mongodb-js/saslprep": "^1.1.9",
"bson": "^6.10.4", "bson": "^6.10.3",
"mongodb-connection-string-url": "^3.0.0" "mongodb-connection-string-url": "^3.0.0"
}, },
"engines": { "engines": {
@@ -41510,52 +41605,6 @@
"url": "https://opencollective.com/mongoose" "url": "https://opencollective.com/mongoose"
} }
}, },
"node_modules/mongoose/node_modules/mongodb": {
"version": "6.14.2",
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.14.2.tgz",
"integrity": "sha512-kMEHNo0F3P6QKDq17zcDuPeaywK/YaJVCEQRzPF3TOM/Bl9MFg64YE5Tu7ifj37qZJMhwU1tl2Ioivws5gRG5Q==",
"license": "Apache-2.0",
"dependencies": {
"@mongodb-js/saslprep": "^1.1.9",
"bson": "^6.10.3",
"mongodb-connection-string-url": "^3.0.0"
},
"engines": {
"node": ">=16.20.1"
},
"peerDependencies": {
"@aws-sdk/credential-providers": "^3.188.0",
"@mongodb-js/zstd": "^1.1.0 || ^2.0.0",
"gcp-metadata": "^5.2.0",
"kerberos": "^2.0.1",
"mongodb-client-encryption": ">=6.0.0 <7",
"snappy": "^7.2.2",
"socks": "^2.7.1"
},
"peerDependenciesMeta": {
"@aws-sdk/credential-providers": {
"optional": true
},
"@mongodb-js/zstd": {
"optional": true
},
"gcp-metadata": {
"optional": true
},
"kerberos": {
"optional": true
},
"mongodb-client-encryption": {
"optional": true
},
"snappy": {
"optional": true
},
"socks": {
"optional": true
}
}
},
"node_modules/moo-color": { "node_modules/moo-color": {
"version": "1.0.3", "version": "1.0.3",
"resolved": "https://registry.npmjs.org/moo-color/-/moo-color-1.0.3.tgz", "resolved": "https://registry.npmjs.org/moo-color/-/moo-color-1.0.3.tgz",
@@ -51235,7 +51284,7 @@
"keyv-file": "^5.1.2", "keyv-file": "^5.1.2",
"librechat-data-provider": "*", "librechat-data-provider": "*",
"memorystore": "^1.6.7", "memorystore": "^1.6.7",
"mongoose": "^8.12.1", "mongodb": "^6.14.2",
"rate-limit-redis": "^4.2.0", "rate-limit-redis": "^4.2.0",
"rimraf": "^5.0.1", "rimraf": "^5.0.1",
"rollup": "^4.22.4", "rollup": "^4.22.4",
@@ -51263,6 +51312,7 @@
"keyv-file": "^5.1.2", "keyv-file": "^5.1.2",
"librechat-data-provider": "*", "librechat-data-provider": "*",
"memorystore": "^1.6.7", "memorystore": "^1.6.7",
"mongoose": "^8.12.1",
"node-fetch": "2.7.0", "node-fetch": "2.7.0",
"rate-limit-redis": "^4.2.0", "rate-limit-redis": "^4.2.0",
"tiktoken": "^1.0.15", "tiktoken": "^1.0.15",

View file

@@ -69,7 +69,7 @@
"keyv-file": "^5.1.2", "keyv-file": "^5.1.2",
"librechat-data-provider": "*", "librechat-data-provider": "*",
"memorystore": "^1.6.7", "memorystore": "^1.6.7",
"mongoose": "^8.12.1", "mongodb": "^6.14.2",
"rate-limit-redis": "^4.2.0", "rate-limit-redis": "^4.2.0",
"rimraf": "^5.0.1", "rimraf": "^5.0.1",
"rollup": "^4.22.4", "rollup": "^4.22.4",
@@ -100,6 +100,7 @@
"keyv-file": "^5.1.2", "keyv-file": "^5.1.2",
"librechat-data-provider": "*", "librechat-data-provider": "*",
"memorystore": "^1.6.7", "memorystore": "^1.6.7",
"mongoose": "^8.12.1",
"node-fetch": "2.7.0", "node-fetch": "2.7.0",
"rate-limit-redis": "^4.2.0", "rate-limit-redis": "^4.2.0",
"tiktoken": "^1.0.15", "tiktoken": "^1.0.15",

View file

@@ -1,7 +1,8 @@
import mongoose from 'mongoose'; import mongoose from 'mongoose';
import { EventEmitter } from 'events'; import { EventEmitter } from 'events';
import { GridFSBucket } from 'mongodb';
import { logger } from '@librechat/data-schemas'; import { logger } from '@librechat/data-schemas';
import { GridFSBucket, type Db, type ReadPreference, type Collection } from 'mongodb'; import type { Db, ReadPreference, Collection } from 'mongodb';
interface KeyvMongoOptions { interface KeyvMongoOptions {
url?: string; url?: string;
@@ -103,7 +104,7 @@ class KeyvMongoCustom extends EventEmitter {
const stream = client.bucket.openDownloadStreamByName(key); const stream = client.bucket.openDownloadStreamByName(key);
return new Promise((resolve) => { return new Promise((resolve) => {
const resp: Buffer[] = []; const resp: Uint8Array[] = [];
stream.on('error', () => { stream.on('error', () => {
resolve(undefined); resolve(undefined);
}); });
@@ -113,7 +114,7 @@ class KeyvMongoCustom extends EventEmitter {
resolve(data); resolve(data);
}); });
stream.on('data', (chunk: Buffer) => { stream.on('data', (chunk: Uint8Array) => {
resp.push(chunk); resp.push(chunk);
}); });
}); });

View file

@@ -92,14 +92,16 @@ describe('tempChatRetention', () => {
describe('createTempChatExpirationDate', () => { describe('createTempChatExpirationDate', () => {
it('should create expiration date with default retention period', () => { it('should create expiration date with default retention period', () => {
const beforeCall = Date.now();
const result = createTempChatExpirationDate(); const result = createTempChatExpirationDate();
const afterCall = Date.now();
const expectedDate = new Date(); const expectedMin = beforeCall + DEFAULT_RETENTION_HOURS * 60 * 60 * 1000;
expectedDate.setHours(expectedDate.getHours() + DEFAULT_RETENTION_HOURS); const expectedMax = afterCall + DEFAULT_RETENTION_HOURS * 60 * 60 * 1000;
// Allow for small time differences in test execution // Result should be between expectedMin and expectedMax
const timeDiff = Math.abs(result.getTime() - expectedDate.getTime()); expect(result.getTime()).toBeGreaterThanOrEqual(expectedMin);
expect(timeDiff).toBeLessThan(1000); // Less than 1 second difference expect(result.getTime()).toBeLessThanOrEqual(expectedMax);
}); });
it('should create expiration date with custom retention period', () => { it('should create expiration date with custom retention period', () => {
@@ -109,14 +111,16 @@ describe('tempChatRetention', () => {
}, },
}; };
const beforeCall = Date.now();
const result = createTempChatExpirationDate(config?.interfaceConfig); const result = createTempChatExpirationDate(config?.interfaceConfig);
const afterCall = Date.now();
const expectedDate = new Date(); const expectedMin = beforeCall + 12 * 60 * 60 * 1000;
expectedDate.setHours(expectedDate.getHours() + 12); const expectedMax = afterCall + 12 * 60 * 60 * 1000;
// Allow for small time differences in test execution // Result should be between expectedMin and expectedMax
const timeDiff = Math.abs(result.getTime() - expectedDate.getTime()); expect(result.getTime()).toBeGreaterThanOrEqual(expectedMin);
expect(timeDiff).toBeLessThan(1000); // Less than 1 second difference expect(result.getTime()).toBeLessThanOrEqual(expectedMax);
}); });
it('should return a Date object', () => { it('should return a Date object', () => {

View file

@@ -73,7 +73,5 @@ export function getTempChatRetentionHours(
*/ */
export function createTempChatExpirationDate(interfaceConfig?: AppConfig['interfaceConfig']): Date { export function createTempChatExpirationDate(interfaceConfig?: AppConfig['interfaceConfig']): Date {
const retentionHours = getTempChatRetentionHours(interfaceConfig); const retentionHours = getTempChatRetentionHours(interfaceConfig);
const expiredAt = new Date(); return new Date(Date.now() + retentionHours * 60 * 60 * 1000);
expiredAt.setHours(expiredAt.getHours() + retentionHours);
return expiredAt;
} }