Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-09-22 06:00:56 +02:00)
🛡️ chore: address several npm vulnerabilities (#4151)
* chore: bump express to 4.21.0 to address CVE-2024-45590 and CVE-2024-43796
* chore: npm audit fix
* chore: uninstall unused `ws` dependency
* chore: bump nodemailer to 6.9.15
* chore: bump mongoose to v7.3.3
* chore: bump lint-staged for micromatch upgrade
* chore: bump axios to 1.7.7
* chore: npm audit fix for mongodb/mongoose vulns
Parent: f7341336dd
Commit: 94d1afee84
7 changed files with 770 additions and 686 deletions
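For reference, the bumps described above roughly correspond to npm commands like the following. This is only a sketch: the workspace names and exact flags are assumptions about the repository layout and are not part of this commit.

    # Sketch only; workspace names (api, client) are assumed, not taken from the commit.
    npm install express@4.21.0 mongoose@7.3.3 nodemailer@6.9.15 axios@1.7.7 --workspace=api
    npm install axios@1.7.7 --workspace=client
    npm uninstall ws --workspace=api              # drop the unused ws dependency
    npm install --save-dev lint-staged@15.2.10    # pulls in the patched micromatch
    npm audit fix                                 # resolve remaining advisories; updates package-lock.json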
@@ -44,7 +44,7 @@
     "@langchain/google-genai": "^0.0.11",
     "@langchain/google-vertexai": "^0.0.17",
     "@librechat/agents": "^1.5.2",
-    "axios": "^1.3.4",
+    "axios": "^1.7.7",
     "bcryptjs": "^2.4.3",
     "cheerio": "^1.0.0-rc.12",
     "cohere-ai": "^7.9.1",
@@ -55,7 +55,7 @@
     "cors": "^2.8.5",
     "dedent": "^1.5.3",
     "dotenv": "^16.0.3",
-    "express": "^4.18.2",
+    "express": "^4.21.0",
     "express-mongo-sanitize": "^2.2.0",
     "express-rate-limit": "^6.9.0",
     "express-session": "^1.17.3",
@@ -76,11 +76,11 @@
     "meilisearch": "^0.38.0",
     "mime": "^3.0.0",
     "module-alias": "^2.2.3",
-    "mongoose": "^7.1.1",
+    "mongoose": "^7.3.3",
     "multer": "^1.4.5-lts.1",
     "nanoid": "^3.3.7",
     "nodejs-gpt": "^1.37.4",
-    "nodemailer": "^6.9.4",
+    "nodemailer": "^6.9.15",
     "ollama": "^0.5.0",
     "openai": "^4.47.1",
     "openai-chat-tokens": "^0.2.8",
@@ -101,7 +101,6 @@
     "ua-parser-js": "^1.0.36",
     "winston": "^3.11.0",
     "winston-daily-rotate-file": "^4.7.1",
-    "ws": "^8.17.0",
     "zod": "^3.22.4"
   },
   "devDependencies": {
@@ -1,4 +1,3 @@
-const WebSocket = require('ws');
 const { CacheKeys, findLastSeparatorIndex, SEPARATORS } = require('librechat-data-provider');
 const { getLogStores } = require('~/cache');

@@ -44,33 +43,6 @@ function getRandomVoiceId(voiceIds) {
  * @property {string[]} normalizedAlignment.chars
  */

-/**
- *
- * @param {Record<string, unknown | undefined>} parameters
- * @returns
- */
-function assembleQuery(parameters) {
-  let query = '';
-  let hasQuestionMark = false;
-
-  for (const [key, value] of Object.entries(parameters)) {
-    if (value == null) {
-      continue;
-    }
-
-    if (!hasQuestionMark) {
-      query += '?';
-      hasQuestionMark = true;
-    } else {
-      query += '&';
-    }
-
-    query += `${key}=${value}`;
-  }
-
-  return query;
-}
-
 const MAX_NOT_FOUND_COUNT = 6;
 const MAX_NO_CHANGE_COUNT = 10;

@@ -197,144 +169,6 @@ function splitTextIntoChunks(text, chunkSize = 4000) {
   return chunks;
 }

-/**
- * Input stream text to speech
- * @param {Express.Response} res
- * @param {AsyncIterable<string>} textStream
- * @param {(token: string) => Promise<boolean>} callback - Whether to continue the stream or not
- * @returns {AsyncGenerator<AudioChunk>}
- */
-function inputStreamTextToSpeech(res, textStream, callback) {
-  const model = 'eleven_monolingual_v1';
-  const wsUrl = `wss://api.elevenlabs.io/v1/text-to-speech/${getRandomVoiceId()}/stream-input${assembleQuery(
-    {
-      model_id: model,
-      // flush: true,
-      // optimize_streaming_latency: this.settings.optimizeStreamingLatency,
-      optimize_streaming_latency: 1,
-      // output_format: this.settings.outputFormat,
-    },
-  )}`;
-  const socket = new WebSocket(wsUrl);
-
-  socket.onopen = function () {
-    const streamStart = {
-      text: ' ',
-      voice_settings: {
-        stability: 0.5,
-        similarity_boost: 0.8,
-      },
-      xi_api_key: process.env.ELEVENLABS_API_KEY,
-      // generation_config: { chunk_length_schedule: [50, 90, 120, 150, 200] },
-    };
-
-    socket.send(JSON.stringify(streamStart));
-
-    // send stream until done
-    const streamComplete = new Promise((resolve, reject) => {
-      (async () => {
-        let textBuffer = '';
-        let shouldContinue = true;
-        for await (const textDelta of textStream) {
-          textBuffer += textDelta;
-
-          // using ". " as separator: sending in full sentences improves the quality
-          // of the audio output significantly.
-          const separatorIndex = findLastSeparatorIndex(textBuffer);
-
-          // Callback for textStream (will return false if signal is aborted)
-          shouldContinue = await callback(textDelta);
-
-          if (separatorIndex === -1) {
-            continue;
-          }
-
-          if (!shouldContinue) {
-            break;
-          }
-
-          const textToProcess = textBuffer.slice(0, separatorIndex);
-          textBuffer = textBuffer.slice(separatorIndex + 1);
-
-          const request = {
-            text: textToProcess,
-            try_trigger_generation: true,
-          };
-
-          socket.send(JSON.stringify(request));
-        }
-
-        // send remaining text:
-        if (shouldContinue && textBuffer.length > 0) {
-          socket.send(
-            JSON.stringify({
-              text: `${textBuffer} `, // append space
-              try_trigger_generation: true,
-            }),
-          );
-        }
-      })()
-        .then(resolve)
-        .catch(reject);
-    });
-
-    streamComplete
-      .then(() => {
-        const endStream = {
-          text: '',
-        };
-
-        socket.send(JSON.stringify(endStream));
-      })
-      .catch((e) => {
-        console.error('Error streaming text to speech:', e);
-        throw e;
-      });
-  };
-
-  return (async function* audioStream() {
-    let isDone = false;
-    let chunks = [];
-    let resolve;
-    let waitForMessage = new Promise((r) => (resolve = r));
-
-    socket.onmessage = function (event) {
-      // console.log(event);
-      const audioChunk = JSON.parse(event.data);
-      if (audioChunk.audio && audioChunk.alignment) {
-        res.write(`event: audio\ndata: ${event.data}\n\n`);
-        chunks.push(audioChunk);
-        resolve(null);
-        waitForMessage = new Promise((r) => (resolve = r));
-      } else if (audioChunk.isFinal) {
-        isDone = true;
-        resolve(null);
-      } else if (audioChunk.message) {
-        console.warn('Received Elevenlabs message:', audioChunk.message);
-        resolve(null);
-      }
-    };
-
-    socket.onerror = function (error) {
-      console.error('WebSocket error:', error);
-      // throw error;
-    };
-
-    socket.onclose = function () {
-      isDone = true;
-      resolve(null);
-    };
-
-    while (!isDone) {
-      await waitForMessage;
-      yield* chunks;
-      chunks = [];
-    }
-
-    res.write('event: end\ndata: \n\n');
-  })();
-}
-
 /**
  *
  * @param {AsyncIterable<string>} llmStream
@@ -349,7 +183,6 @@ async function* llmMessageSource(llmStream) {
 }

 module.exports = {
-  inputStreamTextToSpeech,
   findLastSeparatorIndex,
   createChunkProcessor,
   splitTextIntoChunks,
@@ -54,7 +54,7 @@
     "@tanstack/react-query": "^4.28.0",
     "@tanstack/react-table": "^8.11.7",
     "@zattoo/use-double-click": "1.2.0",
-    "axios": "^1.3.4",
+    "axios": "^1.7.7",
     "class-variance-authority": "^0.6.0",
     "clsx": "^1.2.1",
     "copy-to-clipboard": "^3.3.3",
package-lock.json (generated, 1272 lines changed): diff suppressed because it is too large.
@@ -93,7 +93,7 @@
     "eslint-plugin-react-hooks": "^4.6.0",
     "husky": "^8.0.0",
     "jest": "^29.5.0",
-    "lint-staged": "^13.2.2",
+    "lint-staged": "^15.2.10",
     "prettier": "^2.8.8",
     "prettier-eslint": "^15.0.1",
     "prettier-eslint-cli": "^7.1.0",
@@ -40,7 +40,7 @@
   "homepage": "https://librechat.ai",
   "dependencies": {
     "@types/js-yaml": "^4.0.9",
-    "axios": "^1.3.4",
+    "axios": "^1.7.7",
     "js-yaml": "^4.1.0",
     "openai": "4.11.1",
     "openapi-types": "^12.1.3",
@@ -5,6 +5,6 @@
   "module": "./index.es.js",
   "types": "../dist/types/react-query/index.d.ts",
   "dependencies": {
-    "axios": "^1.3.4"
+    "axios": "^1.7.7"
   }
 }