🏄‍♂️ refactor: Optimize Reasoning UI & Token Streaming (#5546)

*  feat: Implement Show Thinking feature; refactor: testing thinking render optimizations

*  feat: Refactor Thinking component styles and enhance Markdown rendering

* chore: add back removed code, revert type changes

* chore: Add back resetCounter effect to Markdown component for improved code block indexing

* chore: bump @librechat/agents and google langchain packages

* WIP: reasoning type updates

* WIP: first pass, reasoning content blocks

* chore: revert code

* chore: bump @librechat/agents

* refactor: optimize reasoning tag handling

* style: ul indent padding

* feat: add Reasoning component to handle reasoning display

* feat: first pass, content reasoning part styling

* refactor: add content placeholder for endpoints using new stream handler

* refactor: only cache messages when requesting stream audio

* fix: circular dep.

* fix: add default param

* refactor: tts, only request after message stream, fix chrome autoplay

* style: update label for submitting state and add localization for 'Thinking...'

* fix: improve global audio pause logic and reset active run ID

* fix: handle artifact edge cases

* fix: remove unnecessary console log from artifact update test

* feat: add support for continued message handling with new streaming method

---------

Co-authored-by: Marco Beretta <81851188+berry-13@users.noreply.github.com>
This commit is contained in:
Danny Avila 2025-01-29 19:46:58 -05:00 committed by GitHub
parent d60a149ad9
commit 591a019766
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
48 changed files with 1791 additions and 726 deletions

View file

@ -7,15 +7,12 @@ const {
EModelEndpoint, EModelEndpoint,
ErrorTypes, ErrorTypes,
Constants, Constants,
CacheKeys,
Time,
} = require('librechat-data-provider'); } = require('librechat-data-provider');
const { getMessages, saveMessage, updateMessage, saveConvo } = require('~/models'); const { getMessages, saveMessage, updateMessage, saveConvo } = require('~/models');
const { addSpaceIfNeeded, isEnabled } = require('~/server/utils'); const { addSpaceIfNeeded, isEnabled } = require('~/server/utils');
const { truncateToolCallOutputs } = require('./prompts'); const { truncateToolCallOutputs } = require('./prompts');
const checkBalance = require('~/models/checkBalance'); const checkBalance = require('~/models/checkBalance');
const { getFiles } = require('~/models/File'); const { getFiles } = require('~/models/File');
const { getLogStores } = require('~/cache');
const TextStream = require('./TextStream'); const TextStream = require('./TextStream');
const { logger } = require('~/config'); const { logger } = require('~/config');
@ -54,6 +51,12 @@ class BaseClient {
this.outputTokensKey = 'completion_tokens'; this.outputTokensKey = 'completion_tokens';
/** @type {Set<string>} */ /** @type {Set<string>} */
this.savedMessageIds = new Set(); this.savedMessageIds = new Set();
/**
* Flag to determine if the client re-submitted the latest assistant message.
* @type {boolean | undefined} */
this.continued;
/** @type {TMessage[]} */
this.currentMessages = [];
} }
setOptions() { setOptions() {
@ -589,6 +592,7 @@ class BaseClient {
} else { } else {
latestMessage.text = generation; latestMessage.text = generation;
} }
this.continued = true;
} else { } else {
this.currentMessages.push(userMessage); this.currentMessages.push(userMessage);
} }
@ -720,17 +724,6 @@ class BaseClient {
this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user); this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
this.savedMessageIds.add(responseMessage.messageId); this.savedMessageIds.add(responseMessage.messageId);
if (responseMessage.text) {
const messageCache = getLogStores(CacheKeys.MESSAGES);
messageCache.set(
responseMessageId,
{
text: responseMessage.text,
complete: true,
},
Time.FIVE_MINUTES,
);
}
delete responseMessage.tokenCount; delete responseMessage.tokenCount;
return responseMessage; return responseMessage;
} }

View file

@ -1,6 +1,7 @@
const OpenAI = require('openai'); const OpenAI = require('openai');
const { OllamaClient } = require('./OllamaClient'); const { OllamaClient } = require('./OllamaClient');
const { HttpsProxyAgent } = require('https-proxy-agent'); const { HttpsProxyAgent } = require('https-proxy-agent');
const { SplitStreamHandler, GraphEvents } = require('@librechat/agents');
const { const {
Constants, Constants,
ImageDetail, ImageDetail,
@ -28,17 +29,17 @@ const {
createContextHandlers, createContextHandlers,
} = require('./prompts'); } = require('./prompts');
const { encodeAndFormat } = require('~/server/services/Files/images/encode'); const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { addSpaceIfNeeded, isEnabled, sleep } = require('~/server/utils');
const Tokenizer = require('~/server/services/Tokenizer'); const Tokenizer = require('~/server/services/Tokenizer');
const { spendTokens } = require('~/models/spendTokens'); const { spendTokens } = require('~/models/spendTokens');
const { isEnabled, sleep } = require('~/server/utils');
const { handleOpenAIErrors } = require('./tools/util'); const { handleOpenAIErrors } = require('./tools/util');
const { createLLM, RunManager } = require('./llm'); const { createLLM, RunManager } = require('./llm');
const { logger, sendEvent } = require('~/config');
const ChatGPTClient = require('./ChatGPTClient'); const ChatGPTClient = require('./ChatGPTClient');
const { summaryBuffer } = require('./memory'); const { summaryBuffer } = require('./memory');
const { runTitleChain } = require('./chains'); const { runTitleChain } = require('./chains');
const { tokenSplit } = require('./document'); const { tokenSplit } = require('./document');
const BaseClient = require('./BaseClient'); const BaseClient = require('./BaseClient');
const { logger } = require('~/config');
class OpenAIClient extends BaseClient { class OpenAIClient extends BaseClient {
constructor(apiKey, options = {}) { constructor(apiKey, options = {}) {
@ -65,6 +66,8 @@ class OpenAIClient extends BaseClient {
this.usage; this.usage;
/** @type {boolean|undefined} */ /** @type {boolean|undefined} */
this.isO1Model; this.isO1Model;
/** @type {SplitStreamHandler | undefined} */
this.streamHandler;
} }
// TODO: PluginsClient calls this 3x, unneeded // TODO: PluginsClient calls this 3x, unneeded
@ -1064,11 +1067,36 @@ ${convo}
}); });
} }
getStreamText() {
if (!this.streamHandler) {
return '';
}
const reasoningTokens =
this.streamHandler.reasoningTokens.length > 0
? `:::thinking\n${this.streamHandler.reasoningTokens.join('')}\n:::\n`
: '';
return `${reasoningTokens}${this.streamHandler.tokens.join('')}`;
}
getMessageMapMethod() {
/**
* @param {TMessage} msg
*/
return (msg) => {
if (msg.text != null && msg.text && msg.text.startsWith(':::thinking')) {
msg.text = msg.text.replace(/:::thinking.*?:::/gs, '').trim();
}
return msg;
};
}
async chatCompletion({ payload, onProgress, abortController = null }) { async chatCompletion({ payload, onProgress, abortController = null }) {
let error = null; let error = null;
let intermediateReply = [];
const errorCallback = (err) => (error = err); const errorCallback = (err) => (error = err);
const intermediateReply = [];
const reasoningTokens = [];
try { try {
if (!abortController) { if (!abortController) {
abortController = new AbortController(); abortController = new AbortController();
@ -1266,6 +1294,19 @@ ${convo}
reasoningKey = 'reasoning'; reasoningKey = 'reasoning';
} }
this.streamHandler = new SplitStreamHandler({
reasoningKey,
accumulate: true,
runId: this.responseMessageId,
handlers: {
[GraphEvents.ON_RUN_STEP]: (event) => sendEvent(this.options.res, event),
[GraphEvents.ON_MESSAGE_DELTA]: (event) => sendEvent(this.options.res, event),
[GraphEvents.ON_REASONING_DELTA]: (event) => sendEvent(this.options.res, event),
},
});
intermediateReply = this.streamHandler.tokens;
if (modelOptions.stream) { if (modelOptions.stream) {
streamPromise = new Promise((resolve) => { streamPromise = new Promise((resolve) => {
streamResolve = resolve; streamResolve = resolve;
@ -1292,41 +1333,36 @@ ${convo}
} }
if (typeof finalMessage.content !== 'string' || finalMessage.content.trim() === '') { if (typeof finalMessage.content !== 'string' || finalMessage.content.trim() === '') {
finalChatCompletion.choices[0].message.content = intermediateReply.join(''); finalChatCompletion.choices[0].message.content = this.streamHandler.tokens.join('');
} }
}) })
.on('finalMessage', (message) => { .on('finalMessage', (message) => {
if (message?.role !== 'assistant') { if (message?.role !== 'assistant') {
stream.messages.push({ role: 'assistant', content: intermediateReply.join('') }); stream.messages.push({
role: 'assistant',
content: this.streamHandler.tokens.join(''),
});
UnexpectedRoleError = true; UnexpectedRoleError = true;
} }
}); });
let reasoningCompleted = false; if (this.continued === true) {
const latestText = addSpaceIfNeeded(
this.currentMessages[this.currentMessages.length - 1]?.text ?? '',
);
this.streamHandler.handle({
choices: [
{
delta: {
content: latestText,
},
},
],
});
}
for await (const chunk of stream) { for await (const chunk of stream) {
if (chunk?.choices?.[0]?.delta?.[reasoningKey]) { this.streamHandler.handle(chunk);
if (reasoningTokens.length === 0) {
const thinkingDirective = '<think>\n';
intermediateReply.push(thinkingDirective);
reasoningTokens.push(thinkingDirective);
onProgress(thinkingDirective);
}
const reasoning_content = chunk?.choices?.[0]?.delta?.[reasoningKey] || '';
intermediateReply.push(reasoning_content);
reasoningTokens.push(reasoning_content);
onProgress(reasoning_content);
}
const token = chunk?.choices?.[0]?.delta?.content || '';
if (!reasoningCompleted && reasoningTokens.length > 0 && token) {
reasoningCompleted = true;
const separatorTokens = '\n</think>\n';
reasoningTokens.push(separatorTokens);
onProgress(separatorTokens);
}
intermediateReply.push(token);
onProgress(token);
if (abortController.signal.aborted) { if (abortController.signal.aborted) {
stream.controller.abort(); stream.controller.abort();
break; break;
@ -1369,7 +1405,7 @@ ${convo}
if (!Array.isArray(choices) || choices.length === 0) { if (!Array.isArray(choices) || choices.length === 0) {
logger.warn('[OpenAIClient] Chat completion response has no choices'); logger.warn('[OpenAIClient] Chat completion response has no choices');
return intermediateReply.join(''); return this.streamHandler.tokens.join('');
} }
const { message, finish_reason } = choices[0] ?? {}; const { message, finish_reason } = choices[0] ?? {};
@ -1379,11 +1415,11 @@ ${convo}
if (!message) { if (!message) {
logger.warn('[OpenAIClient] Message is undefined in chatCompletion response'); logger.warn('[OpenAIClient] Message is undefined in chatCompletion response');
return intermediateReply.join(''); return this.streamHandler.tokens.join('');
} }
if (typeof message.content !== 'string' || message.content.trim() === '') { if (typeof message.content !== 'string' || message.content.trim() === '') {
const reply = intermediateReply.join(''); const reply = this.streamHandler.tokens.join('');
logger.debug( logger.debug(
'[OpenAIClient] chatCompletion: using intermediateReply due to empty message.content', '[OpenAIClient] chatCompletion: using intermediateReply due to empty message.content',
{ intermediateReply: reply }, { intermediateReply: reply },
@ -1391,8 +1427,18 @@ ${convo}
return reply; return reply;
} }
if (reasoningTokens.length > 0 && this.options.context !== 'title') { if (
return reasoningTokens.join('') + message.content; this.streamHandler.reasoningTokens.length > 0 &&
this.options.context !== 'title' &&
!message.content.startsWith('<think>')
) {
return this.getStreamText();
} else if (
this.streamHandler.reasoningTokens.length > 0 &&
this.options.context !== 'title' &&
message.content.startsWith('<think>')
) {
return message.content.replace('<think>', ':::thinking').replace('</think>', ':::');
} }
return message.content; return message.content;

View file

@ -1,5 +1,4 @@
const OpenAIClient = require('./OpenAIClient'); const OpenAIClient = require('./OpenAIClient');
const { CacheKeys, Time } = require('librechat-data-provider');
const { CallbackManager } = require('@langchain/core/callbacks/manager'); const { CallbackManager } = require('@langchain/core/callbacks/manager');
const { BufferMemory, ChatMessageHistory } = require('langchain/memory'); const { BufferMemory, ChatMessageHistory } = require('langchain/memory');
const { addImages, buildErrorInput, buildPromptPrefix } = require('./output_parsers'); const { addImages, buildErrorInput, buildPromptPrefix } = require('./output_parsers');
@ -11,7 +10,6 @@ const checkBalance = require('~/models/checkBalance');
const { isEnabled } = require('~/server/utils'); const { isEnabled } = require('~/server/utils');
const { extractBaseURL } = require('~/utils'); const { extractBaseURL } = require('~/utils');
const { loadTools } = require('./tools/util'); const { loadTools } = require('./tools/util');
const { getLogStores } = require('~/cache');
const { logger } = require('~/config'); const { logger } = require('~/config');
class PluginsClient extends OpenAIClient { class PluginsClient extends OpenAIClient {
@ -256,17 +254,6 @@ class PluginsClient extends OpenAIClient {
} }
this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user); this.responsePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user);
if (responseMessage.text) {
const messageCache = getLogStores(CacheKeys.MESSAGES);
messageCache.set(
responseMessage.messageId,
{
text: responseMessage.text,
complete: true,
},
Time.FIVE_MINUTES,
);
}
delete responseMessage.tokenCount; delete responseMessage.tokenCount;
return { ...responseMessage, ...result }; return { ...responseMessage, ...result };
} }

View file

@ -16,7 +16,22 @@ async function getMCPManager() {
return mcpManager; return mcpManager;
} }
/**
* Sends message data in Server Sent Events format.
* @param {ServerResponse} res - The server response.
* @param {{ data: string | Record<string, unknown>, event?: string }} event - The message event.
* @param {string} event.event - The type of event.
* @param {string} event.data - The message to be sent.
*/
const sendEvent = (res, event) => {
if (typeof event.data === 'string' && event.data.length === 0) {
return;
}
res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
};
module.exports = { module.exports = {
logger, logger,
sendEvent,
getMCPManager, getMCPManager,
}; };

View file

@ -41,10 +41,10 @@
"@keyv/redis": "^2.8.1", "@keyv/redis": "^2.8.1",
"@langchain/community": "^0.3.14", "@langchain/community": "^0.3.14",
"@langchain/core": "^0.3.18", "@langchain/core": "^0.3.18",
"@langchain/google-genai": "^0.1.6", "@langchain/google-genai": "^0.1.7",
"@langchain/google-vertexai": "^0.1.6", "@langchain/google-vertexai": "^0.1.8",
"@langchain/textsplitters": "^0.1.0", "@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^1.9.94", "@librechat/agents": "^1.9.97",
"@waylaidwanderer/fetch-event-source": "^3.0.1", "@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "^1.7.7", "axios": "^1.7.7",
"bcryptjs": "^2.4.3", "bcryptjs": "^2.4.3",

View file

@ -1,8 +1,6 @@
const throttle = require('lodash/throttle'); const { getResponseSender, Constants } = require('librechat-data-provider');
const { getResponseSender, Constants, CacheKeys, Time } = require('librechat-data-provider');
const { createAbortController, handleAbortError } = require('~/server/middleware'); const { createAbortController, handleAbortError } = require('~/server/middleware');
const { sendMessage, createOnProgress } = require('~/server/utils'); const { sendMessage, createOnProgress } = require('~/server/utils');
const { getLogStores } = require('~/cache');
const { saveMessage } = require('~/models'); const { saveMessage } = require('~/models');
const { logger } = require('~/config'); const { logger } = require('~/config');
@ -57,33 +55,9 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
try { try {
const { client } = await initializeClient({ req, res, endpointOption }); const { client } = await initializeClient({ req, res, endpointOption });
const messageCache = getLogStores(CacheKeys.MESSAGES); const { onProgress: progressCallback, getPartialText } = createOnProgress();
const { onProgress: progressCallback, getPartialText } = createOnProgress({
onProgress: throttle(
({ text: partialText }) => {
/*
const unfinished = endpointOption.endpoint === EModelEndpoint.google ? false : true;
messageCache.set(responseMessageId, {
messageId: responseMessageId,
sender,
conversationId,
parentMessageId: overrideParentMessageId ?? userMessageId,
text: partialText,
model: client.modelOptions.model,
unfinished,
error: false,
user,
}, Time.FIVE_MINUTES);
*/
messageCache.set(responseMessageId, partialText, Time.FIVE_MINUTES); getText = client.getStreamText != null ? client.getStreamText.bind(client) : getPartialText;
},
3000,
{ trailing: false },
),
});
getText = getPartialText;
const getAbortData = () => ({ const getAbortData = () => ({
sender, sender,
@ -91,7 +65,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
userMessagePromise, userMessagePromise,
messageId: responseMessageId, messageId: responseMessageId,
parentMessageId: overrideParentMessageId ?? userMessageId, parentMessageId: overrideParentMessageId ?? userMessageId,
text: getPartialText(), text: getText(),
userMessage, userMessage,
promptTokens, promptTokens,
}); });

View file

@ -1,8 +1,6 @@
const throttle = require('lodash/throttle'); const { getResponseSender } = require('librechat-data-provider');
const { getResponseSender, CacheKeys, Time } = require('librechat-data-provider');
const { createAbortController, handleAbortError } = require('~/server/middleware'); const { createAbortController, handleAbortError } = require('~/server/middleware');
const { sendMessage, createOnProgress } = require('~/server/utils'); const { sendMessage, createOnProgress } = require('~/server/utils');
const { getLogStores } = require('~/cache');
const { saveMessage } = require('~/models'); const { saveMessage } = require('~/models');
const { logger } = require('~/config'); const { logger } = require('~/config');
@ -53,62 +51,44 @@ const EditController = async (req, res, next, initializeClient) => {
} }
}; };
const messageCache = getLogStores(CacheKeys.MESSAGES);
const { onProgress: progressCallback, getPartialText } = createOnProgress({ const { onProgress: progressCallback, getPartialText } = createOnProgress({
generation, generation,
onProgress: throttle(
({ text: partialText }) => {
/*
const unfinished = endpointOption.endpoint === EModelEndpoint.google ? false : true;
{
messageId: responseMessageId,
sender,
conversationId,
parentMessageId: overrideParentMessageId ?? userMessageId,
text: partialText,
model: endpointOption.modelOptions.model,
unfinished,
isEdited: true,
error: false,
user,
} */
messageCache.set(responseMessageId, partialText, Time.FIVE_MINUTES);
},
3000,
{ trailing: false },
),
}); });
const getAbortData = () => ({ let getText;
conversationId,
userMessagePromise,
messageId: responseMessageId,
sender,
parentMessageId: overrideParentMessageId ?? userMessageId,
text: getPartialText(),
userMessage,
promptTokens,
});
const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData);
res.on('close', () => {
logger.debug('[EditController] Request closed');
if (!abortController) {
return;
} else if (abortController.signal.aborted) {
return;
} else if (abortController.requestCompleted) {
return;
}
abortController.abort();
logger.debug('[EditController] Request aborted on close');
});
try { try {
const { client } = await initializeClient({ req, res, endpointOption }); const { client } = await initializeClient({ req, res, endpointOption });
getText = client.getStreamText != null ? client.getStreamText.bind(client) : getPartialText;
const getAbortData = () => ({
conversationId,
userMessagePromise,
messageId: responseMessageId,
sender,
parentMessageId: overrideParentMessageId ?? userMessageId,
text: getText(),
userMessage,
promptTokens,
});
const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData);
res.on('close', () => {
logger.debug('[EditController] Request closed');
if (!abortController) {
return;
} else if (abortController.signal.aborted) {
return;
} else if (abortController.requestCompleted) {
return;
}
abortController.abort();
logger.debug('[EditController] Request aborted on close');
});
let response = await client.sendMessage(text, { let response = await client.sendMessage(text, {
user, user,
generation, generation,
@ -153,7 +133,7 @@ const EditController = async (req, res, next, initializeClient) => {
); );
} }
} catch (error) { } catch (error) {
const partialText = getPartialText(); const partialText = getText();
handleAbortError(res, req, error, { handleAbortError(res, req, error, {
partialText, partialText,
conversationId, conversationId,

View file

@ -10,7 +10,7 @@ const {
const { processCodeOutput } = require('~/server/services/Files/Code/process'); const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { saveBase64Image } = require('~/server/services/Files/process'); const { saveBase64Image } = require('~/server/services/Files/process');
const { loadAuthValues } = require('~/app/clients/tools/util'); const { loadAuthValues } = require('~/app/clients/tools/util');
const { logger } = require('~/config'); const { logger, sendEvent } = require('~/config');
/** @typedef {import('@librechat/agents').Graph} Graph */ /** @typedef {import('@librechat/agents').Graph} Graph */
/** @typedef {import('@librechat/agents').EventHandler} EventHandler */ /** @typedef {import('@librechat/agents').EventHandler} EventHandler */
@ -21,20 +21,6 @@ const { logger } = require('~/config');
/** @typedef {import('@librechat/agents').ContentAggregatorResult['aggregateContent']} ContentAggregator */ /** @typedef {import('@librechat/agents').ContentAggregatorResult['aggregateContent']} ContentAggregator */
/** @typedef {import('@librechat/agents').GraphEvents} GraphEvents */ /** @typedef {import('@librechat/agents').GraphEvents} GraphEvents */
/**
* Sends message data in Server Sent Events format.
* @param {ServerResponse} res - The server response.
* @param {{ data: string | Record<string, unknown>, event?: string }} event - The message event.
* @param {string} event.event - The type of event.
* @param {string} event.data - The message to be sent.
*/
const sendEvent = (res, event) => {
if (typeof event.data === 'string' && event.data.length === 0) {
return;
}
res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
};
class ModelEndHandler { class ModelEndHandler {
/** /**
* @param {Array<UsageMetadata>} collectedUsage * @param {Array<UsageMetadata>} collectedUsage
@ -322,7 +308,6 @@ function createToolEndCallback({ req, res, artifactPromises }) {
} }
module.exports = { module.exports = {
sendEvent,
getDefaultHandlers, getDefaultHandlers,
createToolEndCallback, createToolEndCallback,
}; };

View file

@ -397,18 +397,6 @@ const chatV2 = async (req, res) => {
response = streamRunManager; response = streamRunManager;
response.text = streamRunManager.intermediateText; response.text = streamRunManager.intermediateText;
if (response.text) {
const messageCache = getLogStores(CacheKeys.MESSAGES);
messageCache.set(
responseMessageId,
{
complete: true,
text: response.text,
},
Time.FIVE_MINUTES,
);
}
}; };
await processRun(); await processRun();

View file

@ -1,11 +1,9 @@
const express = require('express'); const express = require('express');
const throttle = require('lodash/throttle'); const { getResponseSender, Constants } = require('librechat-data-provider');
const { getResponseSender, Constants, CacheKeys, Time } = require('librechat-data-provider');
const { initializeClient } = require('~/server/services/Endpoints/gptPlugins'); const { initializeClient } = require('~/server/services/Endpoints/gptPlugins');
const { sendMessage, createOnProgress } = require('~/server/utils'); const { sendMessage, createOnProgress } = require('~/server/utils');
const { addTitle } = require('~/server/services/Endpoints/openAI'); const { addTitle } = require('~/server/services/Endpoints/openAI');
const { saveMessage, updateMessage } = require('~/models'); const { saveMessage, updateMessage } = require('~/models');
const { getLogStores } = require('~/cache');
const { const {
handleAbort, handleAbort,
createAbortController, createAbortController,
@ -72,15 +70,6 @@ router.post(
} }
}; };
const messageCache = getLogStores(CacheKeys.MESSAGES);
const throttledCacheSet = throttle(
(text) => {
messageCache.set(responseMessageId, text, Time.FIVE_MINUTES);
},
3000,
{ trailing: false },
);
let streaming = null; let streaming = null;
let timer = null; let timer = null;
@ -89,13 +78,11 @@ router.post(
sendIntermediateMessage, sendIntermediateMessage,
getPartialText, getPartialText,
} = createOnProgress({ } = createOnProgress({
onProgress: ({ text: partialText }) => { onProgress: () => {
if (timer) { if (timer) {
clearTimeout(timer); clearTimeout(timer);
} }
throttledCacheSet(partialText);
streaming = new Promise((resolve) => { streaming = new Promise((resolve) => {
timer = setTimeout(() => { timer = setTimeout(() => {
resolve(); resolve();

View file

@ -1,6 +1,5 @@
const express = require('express'); const express = require('express');
const throttle = require('lodash/throttle'); const { getResponseSender } = require('librechat-data-provider');
const { getResponseSender, CacheKeys, Time } = require('librechat-data-provider');
const { const {
setHeaders, setHeaders,
handleAbort, handleAbort,
@ -14,7 +13,6 @@ const {
const { sendMessage, createOnProgress, formatSteps, formatAction } = require('~/server/utils'); const { sendMessage, createOnProgress, formatSteps, formatAction } = require('~/server/utils');
const { initializeClient } = require('~/server/services/Endpoints/gptPlugins'); const { initializeClient } = require('~/server/services/Endpoints/gptPlugins');
const { saveMessage, updateMessage } = require('~/models'); const { saveMessage, updateMessage } = require('~/models');
const { getLogStores } = require('~/cache');
const { validateTools } = require('~/app'); const { validateTools } = require('~/app');
const { logger } = require('~/config'); const { logger } = require('~/config');
@ -80,26 +78,16 @@ router.post(
} }
}; };
const messageCache = getLogStores(CacheKeys.MESSAGES);
const throttledCacheSet = throttle(
(text) => {
messageCache.set(responseMessageId, text, Time.FIVE_MINUTES);
},
3000,
{ trailing: false },
);
const { const {
onProgress: progressCallback, onProgress: progressCallback,
sendIntermediateMessage, sendIntermediateMessage,
getPartialText, getPartialText,
} = createOnProgress({ } = createOnProgress({
generation, generation,
onProgress: ({ text: partialText }) => { onProgress: () => {
if (plugin.loading === true) { if (plugin.loading === true) {
plugin.loading = false; plugin.loading = false;
} }
throttledCacheSet(partialText);
}, },
}); });

View file

@ -21,7 +21,7 @@ router.post('/artifact/:messageId', async (req, res) => {
const { messageId } = req.params; const { messageId } = req.params;
const { index, original, updated } = req.body; const { index, original, updated } = req.body;
if (typeof index !== 'number' || index < 0 || !original || !updated) { if (typeof index !== 'number' || index < 0 || original == null || updated == null) {
return res.status(400).json({ error: 'Invalid request parameters' }); return res.status(400).json({ error: 'Invalid request parameters' });
} }

View file

@ -57,14 +57,42 @@ const findAllArtifacts = (message) => {
const replaceArtifactContent = (originalText, artifact, original, updated) => { const replaceArtifactContent = (originalText, artifact, original, updated) => {
const artifactContent = artifact.text.substring(artifact.start, artifact.end); const artifactContent = artifact.text.substring(artifact.start, artifact.end);
const relativeIndex = artifactContent.indexOf(original);
// Find boundaries between ARTIFACT_START and ARTIFACT_END
const contentStart = artifactContent.indexOf('\n', artifactContent.indexOf(ARTIFACT_START)) + 1;
const contentEnd = artifactContent.lastIndexOf(ARTIFACT_END);
if (contentStart === -1 || contentEnd === -1) {
return null;
}
// Check if there are code blocks
const codeBlockStart = artifactContent.indexOf('```\n', contentStart);
const codeBlockEnd = artifactContent.lastIndexOf('\n```', contentEnd);
// Determine where to look for the original content
let searchStart, searchEnd;
if (codeBlockStart !== -1 && codeBlockEnd !== -1) {
// If code blocks exist, search between them
searchStart = codeBlockStart + 4; // after ```\n
searchEnd = codeBlockEnd;
} else {
// Otherwise search in the whole artifact content
searchStart = contentStart;
searchEnd = contentEnd;
}
const innerContent = artifactContent.substring(searchStart, searchEnd);
// Remove trailing newline from original for comparison
const originalTrimmed = original.replace(/\n$/, '');
const relativeIndex = innerContent.indexOf(originalTrimmed);
if (relativeIndex === -1) { if (relativeIndex === -1) {
return null; return null;
} }
const absoluteIndex = artifact.start + relativeIndex; const absoluteIndex = artifact.start + searchStart + relativeIndex;
const endText = originalText.substring(absoluteIndex + original.length); const endText = originalText.substring(absoluteIndex + originalTrimmed.length);
const hasTrailingNewline = endText.startsWith('\n'); const hasTrailingNewline = endText.startsWith('\n');
const updatedText = const updatedText =

View file

@ -260,8 +260,61 @@ console.log(greeting);`;
codeExample, codeExample,
'updated content', 'updated content',
); );
console.log(result);
expect(result).toMatch(/id="2".*updated content/s); expect(result).toMatch(/id="2".*updated content/s);
expect(result).toMatch(new RegExp(`${ARTIFACT_START}.*updated content.*${ARTIFACT_END}`, 's')); expect(result).toMatch(new RegExp(`${ARTIFACT_START}.*updated content.*${ARTIFACT_END}`, 's'));
}); });
test('should handle empty content in artifact without code blocks', () => {
const artifactText = `${ARTIFACT_START}\n\n${ARTIFACT_END}`;
const artifact = {
start: 0,
end: artifactText.length,
text: artifactText,
source: 'text',
};
const result = replaceArtifactContent(artifactText, artifact, '', 'new content');
expect(result).toBe(`${ARTIFACT_START}\nnew content\n${ARTIFACT_END}`);
});
test('should handle empty content in artifact with code blocks', () => {
const artifactText = createArtifactText({ content: '' });
const artifact = {
start: 0,
end: artifactText.length,
text: artifactText,
source: 'text',
};
const result = replaceArtifactContent(artifactText, artifact, '', 'new content');
expect(result).toMatch(/```\nnew content\n```/);
});
test('should handle content with trailing newline in code blocks', () => {
const contentWithNewline = 'console.log("test")\n';
const message = {
text: `Some prefix text\n${createArtifactText({
content: contentWithNewline,
})}\nSome suffix text`,
};
const artifacts = findAllArtifacts(message);
expect(artifacts).toHaveLength(1);
const result = replaceArtifactContent(
message.text,
artifacts[0],
contentWithNewline,
'updated content',
);
// Should update the content and preserve artifact structure
expect(result).toContain('```\nupdated content\n```');
// Should preserve surrounding text
expect(result).toMatch(/^Some prefix text\n/);
expect(result).toMatch(/\nSome suffix text$/);
// Should not have extra newlines
expect(result).not.toContain('\n\n```');
expect(result).not.toContain('```\n\n');
});
}); });

View file

@ -364,7 +364,7 @@ class TTSService {
shouldContinue = false; shouldContinue = false;
}); });
const processChunks = createChunkProcessor(req.body.messageId); const processChunks = createChunkProcessor(req.user.id, req.body.messageId);
try { try {
while (shouldContinue) { while (shouldContinue) {

View file

@ -1,4 +1,5 @@
const { CacheKeys, findLastSeparatorIndex, SEPARATORS } = require('librechat-data-provider'); const { CacheKeys, findLastSeparatorIndex, SEPARATORS, Time } = require('librechat-data-provider');
const { getMessage } = require('~/models/Message');
const { getLogStores } = require('~/cache'); const { getLogStores } = require('~/cache');
/** /**
@ -47,10 +48,11 @@ const MAX_NOT_FOUND_COUNT = 6;
const MAX_NO_CHANGE_COUNT = 10; const MAX_NO_CHANGE_COUNT = 10;
/** /**
* @param {string} user
* @param {string} messageId * @param {string} messageId
* @returns {() => Promise<{ text: string, isFinished: boolean }[]>} * @returns {() => Promise<{ text: string, isFinished: boolean }[]>}
*/ */
function createChunkProcessor(messageId) { function createChunkProcessor(user, messageId) {
let notFoundCount = 0; let notFoundCount = 0;
let noChangeCount = 0; let noChangeCount = 0;
let processedText = ''; let processedText = '';
@ -73,15 +75,27 @@ function createChunkProcessor(messageId) {
} }
/** @type { string | { text: string; complete: boolean } } */ /** @type { string | { text: string; complete: boolean } } */
const message = await messageCache.get(messageId); let message = await messageCache.get(messageId);
if (!message) {
message = await getMessage({ user, messageId });
}
if (!message) { if (!message) {
notFoundCount++; notFoundCount++;
return []; return [];
} else {
messageCache.set(
messageId,
{
text: message.text,
complete: true,
},
Time.FIVE_MINUTES,
);
} }
const text = typeof message === 'string' ? message : message.text; const text = typeof message === 'string' ? message : message.text;
const complete = typeof message === 'string' ? false : message.complete; const complete = typeof message === 'string' ? false : message.complete ?? true;
if (text === processedText) { if (text === processedText) {
noChangeCount++; noChangeCount++;

View file

@ -3,6 +3,13 @@ const { createChunkProcessor, splitTextIntoChunks } = require('./streamAudio');
jest.mock('keyv'); jest.mock('keyv');
const globalCache = {}; const globalCache = {};
jest.mock('~/models/Message', () => {
return {
getMessage: jest.fn().mockImplementation((messageId) => {
return globalCache[messageId] || null;
}),
};
});
jest.mock('~/cache/getLogStores', () => { jest.mock('~/cache/getLogStores', () => {
return jest.fn().mockImplementation(() => { return jest.fn().mockImplementation(() => {
const EventEmitter = require('events'); const EventEmitter = require('events');
@ -56,9 +63,10 @@ describe('processChunks', () => {
jest.resetAllMocks(); jest.resetAllMocks();
mockMessageCache = { mockMessageCache = {
get: jest.fn(), get: jest.fn(),
set: jest.fn(),
}; };
require('~/cache/getLogStores').mockReturnValue(mockMessageCache); require('~/cache/getLogStores').mockReturnValue(mockMessageCache);
processChunks = createChunkProcessor('message-id'); processChunks = createChunkProcessor('userId', 'message-id');
}); });
it('should return an empty array when the message is not found', async () => { it('should return an empty array when the message is not found', async () => {

View file

@ -1,19 +1,15 @@
const throttle = require('lodash/throttle');
const { const {
Time, Constants,
CacheKeys,
StepTypes, StepTypes,
ContentTypes, ContentTypes,
ToolCallTypes, ToolCallTypes,
MessageContentTypes, MessageContentTypes,
AssistantStreamEvents, AssistantStreamEvents,
Constants,
} = require('librechat-data-provider'); } = require('librechat-data-provider');
const { retrieveAndProcessFile } = require('~/server/services/Files/process'); const { retrieveAndProcessFile } = require('~/server/services/Files/process');
const { processRequiredActions } = require('~/server/services/ToolService'); const { processRequiredActions } = require('~/server/services/ToolService');
const { createOnProgress, sendMessage, sleep } = require('~/server/utils'); const { createOnProgress, sendMessage, sleep } = require('~/server/utils');
const { processMessages } = require('~/server/services/Threads'); const { processMessages } = require('~/server/services/Threads');
const { getLogStores } = require('~/cache');
const { logger } = require('~/config'); const { logger } = require('~/config');
/** /**
@ -611,20 +607,8 @@ class StreamRunManager {
const index = this.getStepIndex(stepKey); const index = this.getStepIndex(stepKey);
this.orderedRunSteps.set(index, message_creation); this.orderedRunSteps.set(index, message_creation);
const messageCache = getLogStores(CacheKeys.MESSAGES); const { onProgress: progressCallback } = createOnProgress();
// Create the Factory Function to stream the message
const { onProgress: progressCallback } = createOnProgress({
onProgress: throttle(
() => {
messageCache.set(this.finalMessage.messageId, this.getText(), Time.FIVE_MINUTES);
},
3000,
{ trailing: false },
),
});
// This creates a function that attaches all of the parameters
// specified here to each SSE message generated by the TextStream
const onProgress = progressCallback({ const onProgress = progressCallback({
index, index,
res: this.res, res: this.res,

View file

@ -18,7 +18,12 @@ const citationRegex = /\[\^\d+?\^]/g;
const addSpaceIfNeeded = (text) => (text.length > 0 && !text.endsWith(' ') ? text + ' ' : text); const addSpaceIfNeeded = (text) => (text.length > 0 && !text.endsWith(' ') ? text + ' ' : text);
const base = { message: true, initial: true }; const base = { message: true, initial: true };
const createOnProgress = ({ generation = '', onProgress: _onProgress }) => { const createOnProgress = (
{ generation = '', onProgress: _onProgress } = {
generation: '',
onProgress: null,
},
) => {
let i = 0; let i = 0;
let tokens = addSpaceIfNeeded(generation); let tokens = addSpaceIfNeeded(generation);

Binary file not shown.

View file

@ -49,5 +49,14 @@ const App = () => {
export default () => ( export default () => (
<ScreenshotProvider> <ScreenshotProvider>
<App /> <App />
<iframe
src="/assets/silence.mp3"
allow="autoplay"
id="audio"
title="audio-silence"
style={{
display: 'none',
}}
/>
</ScreenshotProvider> </ScreenshotProvider>
); );

View file

@ -1,7 +1,9 @@
import { createContext, useContext } from 'react'; import { createContext, useContext } from 'react';
type MessageContext = { type MessageContext = {
messageId: string; messageId: string;
nextType?: string;
partIndex?: number; partIndex?: number;
isExpanded: boolean;
conversationId?: string | null; conversationId?: string | null;
}; };

View file

@ -62,10 +62,6 @@ export function Artifact({
const content = extractContent(props.children); const content = extractContent(props.children);
logger.log('artifacts', 'updateArtifact: content.length', content.length); logger.log('artifacts', 'updateArtifact: content.length', content.length);
if (!content || content.trim() === '') {
return;
}
const title = props.title ?? 'Untitled Artifact'; const title = props.title ?? 'Untitled Artifact';
const type = props.type ?? 'unknown'; const type = props.type ?? 'unknown';
const identifier = props.identifier ?? 'no-identifier'; const identifier = props.identifier ?? 'no-identifier';

View file

@ -43,7 +43,6 @@ const CodeEditor = ({
}, },
onError: () => { onError: () => {
setIsMutating(false); setIsMutating(false);
setCurrentCode(artifact.content);
}, },
}); });

View file

@ -1,20 +1,62 @@
import { useState } from 'react'; import { useState, useMemo, memo, useCallback } from 'react';
import { useRecoilValue } from 'recoil';
import { Atom, ChevronDown } from 'lucide-react'; import { Atom, ChevronDown } from 'lucide-react';
import type { MouseEvent } from 'react'; import type { MouseEvent, FC } from 'react';
import useLocalize from '~/hooks/useLocalize'; import { useLocalize } from '~/hooks';
import store from '~/store';
interface ThinkingProps { const BUTTON_STYLES = {
children: React.ReactNode; base: 'group mt-3 flex w-fit items-center justify-center rounded-xl bg-surface-tertiary px-3 py-2 text-xs leading-[18px] animate-thinking-appear',
} icon: 'icon-sm ml-1.5 transform-gpu text-text-primary transition-transform duration-200',
} as const;
const Thinking = ({ children }: ThinkingProps) => { const CONTENT_STYLES = {
wrapper: 'relative pl-3 text-text-secondary',
border:
'absolute left-0 h-[calc(100%-10px)] border-l-2 border-border-medium dark:border-border-heavy',
partBorder:
'absolute left-0 h-[calc(100%)] border-l-2 border-border-medium dark:border-border-heavy',
text: 'whitespace-pre-wrap leading-[26px]',
} as const;
export const ThinkingContent: FC<{ children: React.ReactNode; isPart?: boolean }> = memo(
({ isPart, children }) => (
<div className={CONTENT_STYLES.wrapper}>
<div className={isPart === true ? CONTENT_STYLES.partBorder : CONTENT_STYLES.border} />
<p className={CONTENT_STYLES.text}>{children}</p>
</div>
),
);
export const ThinkingButton = memo(
({
isExpanded,
onClick,
label,
}: {
isExpanded: boolean;
onClick: (e: MouseEvent<HTMLButtonElement>) => void;
label: string;
}) => (
<button type="button" onClick={onClick} className={BUTTON_STYLES.base}>
<Atom size={14} className="mr-1.5 text-text-secondary" />
{label}
<ChevronDown className={`${BUTTON_STYLES.icon} ${isExpanded ? 'rotate-180' : ''}`} />
</button>
),
);
const Thinking: React.ElementType = memo(({ children }: { children: React.ReactNode }) => {
const localize = useLocalize(); const localize = useLocalize();
const [isExpanded, setIsExpanded] = useState(true); const showThinking = useRecoilValue<boolean>(store.showThinking);
const [isExpanded, setIsExpanded] = useState(showThinking);
const handleClick = (e: MouseEvent<HTMLButtonElement>) => { const handleClick = useCallback((e: MouseEvent<HTMLButtonElement>) => {
e.preventDefault(); e.preventDefault();
setIsExpanded(!isExpanded); setIsExpanded((prev) => !prev);
}; }, []);
const label = useMemo(() => localize('com_ui_thoughts'), [localize]);
if (children == null) { if (children == null) {
return null; return null;
@ -22,28 +64,23 @@ const Thinking = ({ children }: ThinkingProps) => {
return ( return (
<div className="mb-3"> <div className="mb-3">
<button <ThinkingButton isExpanded={isExpanded} onClick={handleClick} label={label} />
type="button" <div
onClick={handleClick} className="grid transition-all duration-300 ease-out"
className="group mb-3 flex w-fit items-center justify-center rounded-xl bg-surface-tertiary px-3.5 py-2 text-xs leading-[18px] text-text-primary transition-colors hover:bg-surface-secondary" style={{
gridTemplateRows: isExpanded ? '1fr' : '0fr',
}}
> >
<Atom size={14} className="mr-1.5 text-text-secondary" /> <div className="overflow-hidden">
{localize('com_ui_thoughts')} <ThinkingContent>{children}</ThinkingContent>
<ChevronDown
className="icon-sm ml-1.5 text-text-primary transition-transform duration-200"
style={{
transform: isExpanded ? 'rotate(180deg)' : 'rotate(0deg)',
}}
/>
</button>
{isExpanded && (
<div className="relative pl-3 text-text-secondary">
<div className="absolute left-0 top-[5px] h-[calc(100%-10px)] border-l-2 border-border-medium dark:border-border-heavy" />
<p className="my-4 whitespace-pre-wrap leading-[26px]">{children}</p>
</div> </div>
)} </div>
</div> </div>
); );
}; });
export default Thinking; ThinkingButton.displayName = 'ThinkingButton';
ThinkingContent.displayName = 'ThinkingContent';
Thinking.displayName = 'Thinking';
export default memo(Thinking);

View file

@ -1,3 +1,4 @@
/* eslint-disable jsx-a11y/media-has-caption */
import { useEffect, useMemo } from 'react'; import { useEffect, useMemo } from 'react';
import { useRecoilValue } from 'recoil'; import { useRecoilValue } from 'recoil';
import type { TMessageAudio } from '~/common'; import type { TMessageAudio } from '~/common';
@ -78,7 +79,6 @@ export function BrowserTTS({ isLast, index, messageId, content, className }: TMe
logger.error('Error fetching audio:', error); logger.error('Error fetching audio:', error);
}} }}
id={`audio-${messageId}`} id={`audio-${messageId}`}
muted
autoPlay autoPlay
/> />
</> </>
@ -169,7 +169,6 @@ export function EdgeTTS({ isLast, index, messageId, content, className }: TMessa
logger.error('Error fetching audio:', error); logger.error('Error fetching audio:', error);
}} }}
id={`audio-${messageId}`} id={`audio-${messageId}`}
muted
autoPlay autoPlay
/> />
) : null} ) : null}
@ -248,7 +247,6 @@ export function ExternalTTS({ isLast, index, messageId, content, className }: TM
logger.error('Error fetching audio:', error); logger.error('Error fetching audio:', error);
}} }}
id={`audio-${messageId}`} id={`audio-${messageId}`}
muted
autoPlay autoPlay
/> />
</> </>

View file

@ -53,7 +53,7 @@ export default function StreamAudio({ index = 0 }) {
const shouldFetch = !!( const shouldFetch = !!(
token != null && token != null &&
automaticPlayback && automaticPlayback &&
isSubmitting && !isSubmitting &&
latestMessage && latestMessage &&
!latestMessage.isCreatedByUser && !latestMessage.isCreatedByUser &&
latestText && latestText &&
@ -118,14 +118,14 @@ export default function StreamAudio({ index = 0 }) {
} }
let done = false; let done = false;
const chunks: Uint8Array[] = []; const chunks: ArrayBuffer[] = [];
while (!done) { while (!done) {
const readPromise = reader.read(); const readPromise = reader.read();
const { value, done: readerDone } = (await Promise.race([ const { value, done: readerDone } = (await Promise.race([
readPromise, readPromise,
timeoutPromise(maxPromiseTime, promiseTimeoutMessage), timeoutPromise(maxPromiseTime, promiseTimeoutMessage),
])) as ReadableStreamReadResult<Uint8Array>; ])) as ReadableStreamReadResult<ArrayBuffer>;
if (cacheTTS && value) { if (cacheTTS && value) {
chunks.push(value); chunks.push(value);
@ -195,8 +195,8 @@ export default function StreamAudio({ index = 0 }) {
useEffect(() => { useEffect(() => {
if ( if (
playbackRate && playbackRate != null &&
globalAudioURL && globalAudioURL != null &&
playbackRate > 0 && playbackRate > 0 &&
audioRef.current && audioRef.current &&
audioRef.current.playbackRate !== playbackRate audioRef.current.playbackRate !== playbackRate
@ -213,6 +213,7 @@ export default function StreamAudio({ index = 0 }) {
logger.log('StreamAudio.tsx - globalAudioURL:', globalAudioURL); logger.log('StreamAudio.tsx - globalAudioURL:', globalAudioURL);
return ( return (
// eslint-disable-next-line jsx-a11y/media-has-caption
<audio <audio
ref={audioRef} ref={audioRef}
controls controls
@ -226,7 +227,6 @@ export default function StreamAudio({ index = 0 }) {
}} }}
src={globalAudioURL ?? undefined} src={globalAudioURL ?? undefined}
id={globalAudioId} id={globalAudioId}
muted
autoPlay autoPlay
/> />
); );

View file

@ -1,8 +1,10 @@
import { memo, useMemo } from 'react'; import { memo, useMemo, useState } from 'react';
import { useRecoilValue } from 'recoil'; import { useRecoilValue, useRecoilState } from 'recoil';
import { ContentTypes } from 'librechat-data-provider'; import { ContentTypes } from 'librechat-data-provider';
import type { TMessageContentParts, TAttachment, Agents } from 'librechat-data-provider'; import type { TMessageContentParts, TAttachment, Agents } from 'librechat-data-provider';
import { ThinkingButton } from '~/components/Artifacts/Thinking';
import EditTextPart from './Parts/EditTextPart'; import EditTextPart from './Parts/EditTextPart';
import useLocalize from '~/hooks/useLocalize';
import { mapAttachments } from '~/utils/map'; import { mapAttachments } from '~/utils/map';
import { MessageContext } from '~/Providers'; import { MessageContext } from '~/Providers';
import store from '~/store'; import store from '~/store';
@ -39,11 +41,20 @@ const ContentParts = memo(
siblingIdx, siblingIdx,
setSiblingIdx, setSiblingIdx,
}: ContentPartsProps) => { }: ContentPartsProps) => {
const localize = useLocalize();
const [showThinking, setShowThinking] = useRecoilState<boolean>(store.showThinking);
const [isExpanded, setIsExpanded] = useState(showThinking);
const messageAttachmentsMap = useRecoilValue(store.messageAttachmentsMap); const messageAttachmentsMap = useRecoilValue(store.messageAttachmentsMap);
const attachmentMap = useMemo( const attachmentMap = useMemo(
() => mapAttachments(attachments ?? messageAttachmentsMap[messageId] ?? []), () => mapAttachments(attachments ?? messageAttachmentsMap[messageId] ?? []),
[attachments, messageAttachmentsMap, messageId], [attachments, messageAttachmentsMap, messageId],
); );
const hasReasoningParts = useMemo(
() => content?.some((part) => part?.type === ContentTypes.THINK && part.think) ?? false,
[content],
);
if (!content) { if (!content) {
return null; return null;
} }
@ -74,6 +85,21 @@ const ContentParts = memo(
return ( return (
<> <>
{hasReasoningParts && (
<div className="mb-5">
<ThinkingButton
isExpanded={isExpanded}
onClick={() =>
setIsExpanded((prev) => {
const val = !prev;
setShowThinking(val);
return val;
})
}
label={isSubmitting ? localize('com_ui_thinking') : localize('com_ui_thoughts')}
/>
</div>
)}
{content {content
.filter((part) => part) .filter((part) => part)
.map((part, idx) => { .map((part, idx) => {
@ -88,6 +114,8 @@ const ContentParts = memo(
messageId, messageId,
conversationId, conversationId,
partIndex: idx, partIndex: idx,
isExpanded,
nextType: content[idx + 1]?.type,
}} }}
> >
<Part <Part

View file

@ -23,7 +23,7 @@ import useLocalize from '~/hooks/useLocalize';
import store from '~/store'; import store from '~/store';
type TCodeProps = { type TCodeProps = {
inline: boolean; inline?: boolean;
className?: string; className?: string;
children: React.ReactNode; children: React.ReactNode;
}; };
@ -42,7 +42,7 @@ export const code: React.ElementType = memo(({ className, children }: TCodeProps
}, [children, resetCounter]); }, [children, resetCounter]);
if (isMath) { if (isMath) {
return children; return <>{children}</>;
} else if (isSingleLine) { } else if (isSingleLine) {
return ( return (
<code onDoubleClick={handleDoubleClick} className={className}> <code onDoubleClick={handleDoubleClick} className={className}>
@ -71,79 +71,86 @@ export const codeNoExecution: React.ElementType = memo(({ className, children }:
} }
}); });
export const a: React.ElementType = memo( type TAnchorProps = {
({ href, children }: { href: string; children: React.ReactNode }) => { href: string;
const user = useRecoilValue(store.user); children: React.ReactNode;
const { showToast } = useToastContext(); };
const localize = useLocalize();
const { export const a: React.ElementType = memo(({ href, children }: TAnchorProps) => {
file_id = '', const user = useRecoilValue(store.user);
filename = '', const { showToast } = useToastContext();
filepath, const localize = useLocalize();
} = useMemo(() => {
const pattern = new RegExp(`(?:files|outputs)/${user?.id}/([^\\s]+)`);
const match = href.match(pattern);
if (match && match[0]) {
const path = match[0];
const parts = path.split('/');
const name = parts.pop();
const file_id = parts.pop();
return { file_id, filename: name, filepath: path };
}
return { file_id: '', filename: '', filepath: '' };
}, [user?.id, href]);
const { refetch: downloadFile } = useFileDownload(user?.id ?? '', file_id); const {
const props: { target?: string; onClick?: React.MouseEventHandler } = { target: '_new' }; file_id = '',
filename = '',
if (!file_id || !filename) { filepath,
return ( } = useMemo(() => {
<a href={href} {...props}> const pattern = new RegExp(`(?:files|outputs)/${user?.id}/([^\\s]+)`);
{children} const match = href.match(pattern);
</a> if (match && match[0]) {
); const path = match[0];
const parts = path.split('/');
const name = parts.pop();
const file_id = parts.pop();
return { file_id, filename: name, filepath: path };
} }
return { file_id: '', filename: '', filepath: '' };
}, [user?.id, href]);
const handleDownload = async (event: React.MouseEvent<HTMLAnchorElement>) => { const { refetch: downloadFile } = useFileDownload(user?.id ?? '', file_id);
event.preventDefault(); const props: { target?: string; onClick?: React.MouseEventHandler } = { target: '_new' };
try {
const stream = await downloadFile();
if (stream.data == null || stream.data === '') {
console.error('Error downloading file: No data found');
showToast({
status: 'error',
message: localize('com_ui_download_error'),
});
return;
}
const link = document.createElement('a');
link.href = stream.data;
link.setAttribute('download', filename);
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
window.URL.revokeObjectURL(stream.data);
} catch (error) {
console.error('Error downloading file:', error);
}
};
props.onClick = handleDownload;
props.target = '_blank';
if (!file_id || !filename) {
return ( return (
<a <a href={href} {...props}>
href={filepath.startsWith('files/') ? `/api/${filepath}` : `/api/files/${filepath}`}
{...props}
>
{children} {children}
</a> </a>
); );
}, }
);
export const p: React.ElementType = memo(({ children }: { children: React.ReactNode }) => { const handleDownload = async (event: React.MouseEvent<HTMLAnchorElement>) => {
event.preventDefault();
try {
const stream = await downloadFile();
if (stream.data == null || stream.data === '') {
console.error('Error downloading file: No data found');
showToast({
status: 'error',
message: localize('com_ui_download_error'),
});
return;
}
const link = document.createElement('a');
link.href = stream.data;
link.setAttribute('download', filename);
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
window.URL.revokeObjectURL(stream.data);
} catch (error) {
console.error('Error downloading file:', error);
}
};
props.onClick = handleDownload;
props.target = '_blank';
return (
<a
href={filepath.startsWith('files/') ? `/api/${filepath}` : `/api/files/${filepath}`}
{...props}
>
{children}
</a>
);
});
type TParagraphProps = {
children: React.ReactNode;
};
export const p: React.ElementType = memo(({ children }: TParagraphProps) => {
return <p className="mb-2 whitespace-pre-wrap">{children}</p>; return <p className="mb-2 whitespace-pre-wrap">{children}</p>;
}); });
@ -157,27 +164,40 @@ type TContentProps = {
const Markdown = memo(({ content = '', showCursor, isLatestMessage }: TContentProps) => { const Markdown = memo(({ content = '', showCursor, isLatestMessage }: TContentProps) => {
const LaTeXParsing = useRecoilValue<boolean>(store.LaTeXParsing); const LaTeXParsing = useRecoilValue<boolean>(store.LaTeXParsing);
const isInitializing = content === ''; const isInitializing = content === '';
let currentContent = content; const currentContent = useMemo(() => {
if (!isInitializing) { if (isInitializing) {
currentContent = currentContent.replace('<think>', ':::thinking') || ''; return '';
currentContent = currentContent.replace('</think>', ':::') || ''; }
currentContent = LaTeXParsing ? preprocessLaTeX(currentContent) : currentContent; return LaTeXParsing ? preprocessLaTeX(content) : content;
} }, [content, LaTeXParsing, isInitializing]);
const rehypePlugins = [ const rehypePlugins = useMemo(
[rehypeKatex, { output: 'mathml' }], () => [
[ [rehypeKatex, { output: 'mathml' }],
rehypeHighlight, [
{ rehypeHighlight,
detect: true, {
ignoreMissing: true, detect: true,
subset: langSubset, ignoreMissing: true,
}, subset: langSubset,
},
],
], ],
]; [],
);
const remarkPlugins: Pluggable[] = useMemo(
() => [
supersub,
remarkGfm,
remarkDirective,
artifactPlugin,
[remarkMath, { singleDollarTextMath: true }],
],
[],
);
if (isInitializing) { if (isInitializing) {
return ( return (
@ -189,14 +209,6 @@ const Markdown = memo(({ content = '', showCursor, isLatestMessage }: TContentPr
); );
} }
const remarkPlugins: Pluggable[] = [
supersub,
remarkGfm,
remarkDirective,
artifactPlugin,
[remarkMath, { singleDollarTextMath: true }],
];
return ( return (
<ArtifactProvider> <ArtifactProvider>
<CodeBlockProvider> <CodeBlockProvider>
@ -205,7 +217,6 @@ const Markdown = memo(({ content = '', showCursor, isLatestMessage }: TContentPr
remarkPlugins={remarkPlugins} remarkPlugins={remarkPlugins}
/* @ts-ignore */ /* @ts-ignore */
rehypePlugins={rehypePlugins} rehypePlugins={rehypePlugins}
// linkTarget="_new"
components={ components={
{ {
code, code,
@ -218,7 +229,7 @@ const Markdown = memo(({ content = '', showCursor, isLatestMessage }: TContentPr
} }
} }
> >
{isLatestMessage && showCursor === true ? currentContent + cursor : currentContent} {isLatestMessage && (showCursor ?? false) ? currentContent + cursor : currentContent}
</ReactMarkdown> </ReactMarkdown>
</CodeBlockProvider> </CodeBlockProvider>
</ArtifactProvider> </ArtifactProvider>

View file

@ -10,6 +10,7 @@ import type { TMessageContentParts, TAttachment } from 'librechat-data-provider'
import { ErrorMessage } from './MessageContent'; import { ErrorMessage } from './MessageContent';
import ExecuteCode from './Parts/ExecuteCode'; import ExecuteCode from './Parts/ExecuteCode';
import RetrievalCall from './RetrievalCall'; import RetrievalCall from './RetrievalCall';
import Reasoning from './Parts/Reasoning';
import CodeAnalyze from './CodeAnalyze'; import CodeAnalyze from './CodeAnalyze';
import Container from './Container'; import Container from './Container';
import ToolCall from './ToolCall'; import ToolCall from './ToolCall';
@ -46,6 +47,12 @@ const Part = memo(({ part, isSubmitting, attachments, showCursor, isCreatedByUse
<Text text={text} isCreatedByUser={isCreatedByUser} showCursor={showCursor} /> <Text text={text} isCreatedByUser={isCreatedByUser} showCursor={showCursor} />
</Container> </Container>
); );
} else if (part.type === ContentTypes.THINK) {
const reasoning = typeof part.think === 'string' ? part.think : part.think.value;
if (typeof reasoning !== 'string') {
return null;
}
return <Reasoning reasoning={reasoning} />;
} else if (part.type === ContentTypes.TOOL_CALL) { } else if (part.type === ContentTypes.TOOL_CALL) {
const toolCall = part[ContentTypes.TOOL_CALL]; const toolCall = part[ContentTypes.TOOL_CALL];

View file

@ -0,0 +1,34 @@
import { memo, useMemo } from 'react';
import { ContentTypes } from 'librechat-data-provider';
import { ThinkingContent } from '~/components/Artifacts/Thinking';
import { useMessageContext } from '~/Providers';
import { cn } from '~/utils';
type ReasoningProps = {
reasoning: string;
};
const Reasoning = memo(({ reasoning }: ReasoningProps) => {
const { isExpanded, nextType } = useMessageContext();
const reasoningText = useMemo(() => {
return reasoning.replace(/^<think>\s*/, '').replace(/\s*<\/think>$/, '');
}, [reasoning]);
return (
<div
className={cn(
'grid transition-all duration-300 ease-out',
nextType !== ContentTypes.THINK && isExpanded && 'mb-10',
)}
style={{
gridTemplateRows: isExpanded ? '1fr' : '0fr',
}}
>
<div className="overflow-hidden">
<ThinkingContent isPart={true}>{reasoningText}</ThinkingContent>
</div>
</div>
);
});
export default Reasoning;

View file

@ -46,7 +46,7 @@ const TextPart = memo(({ text, isCreatedByUser, showCursor }: TextPartProps) =>
showCursorState && !!text.length ? 'result-streaming' : '', showCursorState && !!text.length ? 'result-streaming' : '',
'markdown prose message-content dark:prose-invert light w-full break-words', 'markdown prose message-content dark:prose-invert light w-full break-words',
isCreatedByUser && !enableUserMsgMarkdown && 'whitespace-pre-wrap', isCreatedByUser && !enableUserMsgMarkdown && 'whitespace-pre-wrap',
isCreatedByUser ? 'dark:text-gray-20' : 'dark:text-gray-70', isCreatedByUser ? 'dark:text-gray-20' : 'dark:text-gray-100',
)} )}
> >
{content} {content}

View file

@ -109,7 +109,9 @@ export default function HoverButtons({
messageId={message.messageId} messageId={message.messageId}
content={message.content ?? message.text} content={message.content ?? message.text}
isLast={isLast} isLast={isLast}
className="hover-button rounded-md p-1 pl-0 text-gray-500 hover:bg-gray-100 hover:text-gray-500 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible" className={cn(
'ml-0 flex items-center gap-1.5 rounded-md p-1 text-xs hover:bg-gray-100 hover:text-gray-500 focus:opacity-100 dark:text-gray-400/70 dark:hover:bg-gray-700 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
)}
/> />
)} )}
{isEditableEndpoint && ( {isEditableEndpoint && (

View file

@ -1,4 +1,3 @@
// client/src/components/Chat/Messages/MessageAudio.tsx
import { memo } from 'react'; import { memo } from 'react';
import { useRecoilValue } from 'recoil'; import { useRecoilValue } from 'recoil';
import type { TMessageAudio } from '~/common'; import type { TMessageAudio } from '~/common';

View file

@ -5,6 +5,7 @@ import SendMessageKeyEnter from './EnterToSend';
import ShowCodeSwitch from './ShowCodeSwitch'; import ShowCodeSwitch from './ShowCodeSwitch';
import { ForkSettings } from './ForkSettings'; import { ForkSettings } from './ForkSettings';
import ChatDirection from './ChatDirection'; import ChatDirection from './ChatDirection';
import ShowThinking from './ShowThinking';
import LaTeXParsing from './LaTeXParsing'; import LaTeXParsing from './LaTeXParsing';
import ModularChat from './ModularChat'; import ModularChat from './ModularChat';
import SaveDraft from './SaveDraft'; import SaveDraft from './SaveDraft';
@ -37,6 +38,9 @@ function Chat() {
<div className="pb-3"> <div className="pb-3">
<LaTeXParsing /> <LaTeXParsing />
</div> </div>
<div className="pb-3">
<ShowThinking />
</div>
</div> </div>
); );
} }

View file

@ -0,0 +1,37 @@
import { useRecoilState } from 'recoil';
import HoverCardSettings from '../HoverCardSettings';
import { Switch } from '~/components/ui';
import useLocalize from '~/hooks/useLocalize';
import store from '~/store';
export default function SaveDraft({
onCheckedChange,
}: {
onCheckedChange?: (value: boolean) => void;
}) {
const [showThinking, setSaveDrafts] = useRecoilState<boolean>(store.showThinking);
const localize = useLocalize();
const handleCheckedChange = (value: boolean) => {
setSaveDrafts(value);
if (onCheckedChange) {
onCheckedChange(value);
}
};
return (
<div className="flex items-center justify-between">
<div className="flex items-center space-x-2">
<div>{localize('com_nav_show_thinking')}</div>
<HoverCardSettings side="bottom" text="com_nav_info_show_thinking" />
</div>
<Switch
id="showThinking"
checked={showThinking}
onCheckedChange={handleCheckedChange}
className="ml-4"
data-testid="showThinking"
/>
</div>
);
}

View file

@ -6,12 +6,13 @@ import store from '~/store';
function usePauseGlobalAudio(index = 0) { function usePauseGlobalAudio(index = 0) {
/* Global Audio Variables */ /* Global Audio Variables */
const setAudioRunId = useSetRecoilState(store.audioRunFamily(index)); const setAudioRunId = useSetRecoilState(store.audioRunFamily(index));
const setActiveRunId = useSetRecoilState(store.activeRunFamily(index));
const setGlobalIsPlaying = useSetRecoilState(store.globalAudioPlayingFamily(index)); const setGlobalIsPlaying = useSetRecoilState(store.globalAudioPlayingFamily(index));
const setIsGlobalAudioFetching = useSetRecoilState(store.globalAudioFetchingFamily(index)); const setIsGlobalAudioFetching = useSetRecoilState(store.globalAudioFetchingFamily(index));
const [globalAudioURL, setGlobalAudioURL] = useRecoilState(store.globalAudioURLFamily(index)); const [globalAudioURL, setGlobalAudioURL] = useRecoilState(store.globalAudioURLFamily(index));
const pauseGlobalAudio = useCallback(() => { const pauseGlobalAudio = useCallback(() => {
if (globalAudioURL) { if (globalAudioURL != null && globalAudioURL !== '') {
const globalAudio = document.getElementById(globalAudioId); const globalAudio = document.getElementById(globalAudioId);
if (globalAudio) { if (globalAudio) {
console.log('Pausing global audio', globalAudioURL); console.log('Pausing global audio', globalAudioURL);
@ -21,14 +22,16 @@ function usePauseGlobalAudio(index = 0) {
URL.revokeObjectURL(globalAudioURL); URL.revokeObjectURL(globalAudioURL);
setIsGlobalAudioFetching(false); setIsGlobalAudioFetching(false);
setGlobalAudioURL(null); setGlobalAudioURL(null);
setActiveRunId(null);
setAudioRunId(null); setAudioRunId(null);
} }
}, [ }, [
setAudioRunId,
setActiveRunId,
globalAudioURL, globalAudioURL,
setGlobalAudioURL, setGlobalAudioURL,
setGlobalIsPlaying, setGlobalIsPlaying,
setIsGlobalAudioFetching, setIsGlobalAudioFetching,
setAudioRunId,
]); ]);
return { pauseGlobalAudio }; return { pauseGlobalAudio };

View file

@ -4,9 +4,9 @@ import {
Constants, Constants,
QueryKeys, QueryKeys,
ContentTypes, ContentTypes,
EModelEndpoint,
parseCompactConvo, parseCompactConvo,
isAssistantsEndpoint, isAssistantsEndpoint,
EModelEndpoint,
} from 'librechat-data-provider'; } from 'librechat-data-provider';
import { useSetRecoilState, useResetRecoilState, useRecoilValue } from 'recoil'; import { useSetRecoilState, useResetRecoilState, useRecoilValue } from 'recoil';
import type { import type {
@ -31,6 +31,15 @@ const logChatRequest = (request: Record<string, unknown>) => {
logger.log('====================================='); logger.log('=====================================');
}; };
const usesContentStream = (endpoint: EModelEndpoint | undefined, endpointType?: string) => {
if (endpointType === EModelEndpoint.custom) {
return true;
}
if (endpoint === EModelEndpoint.openAI || endpoint === EModelEndpoint.azureOpenAI) {
return true;
}
};
export default function useChatFunctions({ export default function useChatFunctions({
index = 0, index = 0,
files, files,
@ -219,8 +228,8 @@ export default function useChatFunctions({
unfinished: false, unfinished: false,
isCreatedByUser: false, isCreatedByUser: false,
isEdited: isEditOrContinue, isEdited: isEditOrContinue,
iconURL: convo.iconURL, iconURL: convo?.iconURL,
model: convo.model, model: convo?.model,
error: false, error: false,
}; };
@ -247,6 +256,17 @@ export default function useChatFunctions({
}, },
]; ];
setShowStopButton(true); setShowStopButton(true);
} else if (usesContentStream(endpoint, endpointType)) {
initialResponse.text = '';
initialResponse.content = [
{
type: ContentTypes.TEXT,
[ContentTypes.TEXT]: {
value: responseText,
},
},
];
setShowStopButton(true);
} else { } else {
setShowStopButton(true); setShowStopButton(true);
} }

View file

@ -87,7 +87,7 @@ function useTextToSpeechExternal({
setDownloadFile(false); setDownloadFile(false);
}; };
const { mutate: processAudio, isLoading: isProcessing } = useTextToSpeechMutation({ const { mutate: processAudio } = useTextToSpeechMutation({
onMutate: (variables) => { onMutate: (variables) => {
const inputText = (variables.get('input') ?? '') as string; const inputText = (variables.get('input') ?? '') as string;
if (inputText.length >= 4096) { if (inputText.length >= 4096) {
@ -178,13 +178,14 @@ function useTextToSpeechExternal({
promiseAudioRef.current = null; promiseAudioRef.current = null;
setIsSpeaking(false); setIsSpeaking(false);
} }
}, []); }, [setIsSpeaking]);
useEffect(() => cancelPromiseSpeech, [cancelPromiseSpeech]); useEffect(() => cancelPromiseSpeech, [cancelPromiseSpeech]);
const isLoading = useMemo(() => { const isLoading = useMemo(
return isProcessing || (isLast && globalIsFetching && !globalIsPlaying); () => isLast && globalIsFetching && !globalIsPlaying,
}, [isProcessing, globalIsFetching, globalIsPlaying, isLast]); [globalIsFetching, globalIsPlaying, isLast],
);
const { data: voicesData = [] } = useVoicesQuery(); const { data: voicesData = [] } = useVoicesQuery();

View file

@ -33,8 +33,11 @@ type TStepEvent = {
type MessageDeltaUpdate = { type: ContentTypes.TEXT; text: string; tool_call_ids?: string[] }; type MessageDeltaUpdate = { type: ContentTypes.TEXT; text: string; tool_call_ids?: string[] };
type ReasoningDeltaUpdate = { type: ContentTypes.THINK; think: string };
type AllContentTypes = type AllContentTypes =
| ContentTypes.TEXT | ContentTypes.TEXT
| ContentTypes.THINK
| ContentTypes.TOOL_CALL | ContentTypes.TOOL_CALL
| ContentTypes.IMAGE_FILE | ContentTypes.IMAGE_FILE
| ContentTypes.IMAGE_URL | ContentTypes.IMAGE_URL
@ -84,6 +87,18 @@ export default function useStepHandler({
if (contentPart.tool_call_ids != null) { if (contentPart.tool_call_ids != null) {
update.tool_call_ids = contentPart.tool_call_ids; update.tool_call_ids = contentPart.tool_call_ids;
} }
updatedContent[index] = update;
} else if (
contentType.startsWith(ContentTypes.THINK) &&
ContentTypes.THINK in contentPart &&
typeof contentPart.think === 'string'
) {
const currentContent = updatedContent[index] as ReasoningDeltaUpdate;
const update: ReasoningDeltaUpdate = {
type: ContentTypes.THINK,
think: (currentContent.think || '') + contentPart.think,
};
updatedContent[index] = update; updatedContent[index] = update;
} else if (contentType === ContentTypes.IMAGE_URL && 'image_url' in contentPart) { } else if (contentType === ContentTypes.IMAGE_URL && 'image_url' in contentPart) {
const currentContent = updatedContent[index] as { const currentContent = updatedContent[index] as {
@ -215,6 +230,28 @@ export default function useStepHandler({
const updatedResponse = updateContent(response, runStep.index, contentPart); const updatedResponse = updateContent(response, runStep.index, contentPart);
messageMap.current.set(responseMessageId, updatedResponse);
const currentMessages = getMessages() || [];
setMessages([...currentMessages.slice(0, -1), updatedResponse]);
}
} else if (event === 'on_reasoning_delta') {
const reasoningDelta = data as Agents.ReasoningDeltaEvent;
const runStep = stepMap.current.get(reasoningDelta.id);
const responseMessageId = runStep?.runId ?? '';
if (!runStep || !responseMessageId) {
console.warn('No run step or runId found for reasoning delta event');
return;
}
const response = messageMap.current.get(responseMessageId);
if (response && reasoningDelta.delta.content != null) {
const contentPart = Array.isArray(reasoningDelta.delta.content)
? reasoningDelta.delta.content[0]
: reasoningDelta.delta.content;
const updatedResponse = updateContent(response, runStep.index, contentPart);
messageMap.current.set(responseMessageId, updatedResponse); messageMap.current.set(responseMessageId, updatedResponse);
const currentMessages = getMessages() || []; const currentMessages = getMessages() || [];
setMessages([...currentMessages.slice(0, -1), updatedResponse]); setMessages([...currentMessages.slice(0, -1), updatedResponse]);

View file

@ -410,6 +410,7 @@ export default {
com_ui_more_info: 'More info', com_ui_more_info: 'More info',
com_ui_preview: 'Preview', com_ui_preview: 'Preview',
com_ui_thoughts: 'Thoughts', com_ui_thoughts: 'Thoughts',
com_ui_thinking: 'Thinking...',
com_ui_upload: 'Upload', com_ui_upload: 'Upload',
com_ui_connect: 'Connect', com_ui_connect: 'Connect',
com_ui_locked: 'Locked', com_ui_locked: 'Locked',
@ -843,10 +844,13 @@ export default {
com_nav_enable_cache_tts: 'Enable cache TTS', com_nav_enable_cache_tts: 'Enable cache TTS',
com_nav_voice_select: 'Voice', com_nav_voice_select: 'Voice',
com_nav_enable_cloud_browser_voice: 'Use cloud-based voices', com_nav_enable_cloud_browser_voice: 'Use cloud-based voices',
com_nav_show_thinking: 'Open Thinking Dropdowns by Default',
com_nav_info_enter_to_send: com_nav_info_enter_to_send:
'When enabled, pressing `ENTER` will send your message. When disabled, pressing Enter will add a new line, and you\'ll need to press `CTRL + ENTER` / `⌘ + ENTER` to send your message.', 'When enabled, pressing `ENTER` will send your message. When disabled, pressing Enter will add a new line, and you\'ll need to press `CTRL + ENTER` / `⌘ + ENTER` to send your message.',
com_nav_info_save_draft: com_nav_info_save_draft:
'When enabled, the text and attachments you enter in the chat form will be automatically saved locally as drafts. These drafts will be available even if you reload the page or switch to a different conversation. Drafts are stored locally on your device and are deleted once the message is sent.', 'When enabled, the text and attachments you enter in the chat form will be automatically saved locally as drafts. These drafts will be available even if you reload the page or switch to a different conversation. Drafts are stored locally on your device and are deleted once the message is sent.',
com_nav_info_show_thinking:
'When enabled, the chat will display the thinking dropdowns open by default, allowing you to view the AI\'s reasoning in real-time. When disabled, the thinking dropdowns will remain closed by default for a cleaner and more streamlined interface',
com_nav_info_fork_change_default: com_nav_info_fork_change_default:
'`Visible messages only` includes just the direct path to the selected message. `Include related branches` adds branches along the path. `Include all to/from here` includes all connected messages and branches.', '`Visible messages only` includes just the direct path to the selected message. `Include related branches` adds branches along the path. `Include all to/from here` includes all connected messages and branches.',
com_nav_info_fork_split_target_setting: com_nav_info_fork_split_target_setting:

View file

@ -36,8 +36,8 @@ const localStorageAtoms = {
saveDrafts: atomWithLocalStorage('saveDrafts', true), saveDrafts: atomWithLocalStorage('saveDrafts', true),
forkSetting: atomWithLocalStorage('forkSetting', ''), forkSetting: atomWithLocalStorage('forkSetting', ''),
splitAtTarget: atomWithLocalStorage('splitAtTarget', false), splitAtTarget: atomWithLocalStorage('splitAtTarget', false),
rememberDefaultFork: atomWithLocalStorage(LocalStorageKeys.REMEMBER_FORK_OPTION, false), rememberDefaultFork: atomWithLocalStorage(LocalStorageKeys.REMEMBER_FORK_OPTION, false),
showThinking: atomWithLocalStorage('showThinking', false),
// Beta features settings // Beta features settings
modularChat: atomWithLocalStorage('modularChat', true), modularChat: atomWithLocalStorage('modularChat', true),

View file

@ -2014,7 +2014,7 @@ button.scroll-convo {
list-style-position: outside; list-style-position: outside;
margin-top: 1em; margin-top: 1em;
margin-bottom: 1em; margin-bottom: 1em;
padding-left: 3em; padding-left: 1em;
} }
.prose li, .prose li,

View file

@ -1,134 +0,0 @@
import { WebSocket } from 'ws';
// const { ElevenLabsClient } = require('elevenlabs');

// SECURITY: never hard-code credentials in source control. The key is read
// from the environment; an empty string makes the websocket handshake fail
// with an explicit auth error instead of silently using a leaked secret.
const ELEVENLABS_API_KEY = process.env.ELEVENLABS_API_KEY ?? '';
// Voice is configurable via env; falls back to the previous default voice id.
const VOICE_ID = process.env.ELEVENLABS_VOICE_ID ?? '1RVpBInY9YUYMLSUQReV';
/**
 * One audio message received from the ElevenLabs stream-input websocket.
 * Shape mirrors the service's JSON payload (parsed in `inputStreamTextToSpeech`).
 */
interface AudioChunk {
  // Audio payload — presumably base64-encoded per the ElevenLabs API; confirm against their docs.
  audio: string;
  // True on the terminating chunk of a generation.
  isFinal: boolean;
  // Character-level timing for the raw input text.
  alignment: {
    char_start_times_ms: number[];
    chars_durations_ms: number[];
    chars: string[];
  };
  // Same timing data for the normalized (post-processing) text.
  normalizedAlignment: {
    char_start_times_ms: number[];
    chars_durations_ms: number[];
    chars: string[];
  };
}
export function inputStreamTextToSpeech(
textStream: AsyncIterable<string>,
): AsyncGenerator<AudioChunk> {
const model = 'eleven_turbo_v2';
const wsUrl = `wss://api.elevenlabs.io/v1/text-to-speech/${VOICE_ID}/stream-input?model_id=${model}`;
const socket = new WebSocket(wsUrl);
socket.onopen = function () {
const streamStart = {
text: ' ',
voice_settings: {
stability: 0.5,
similarity_boost: 0.8,
},
xi_api_key: ELEVENLABS_API_KEY,
};
socket.send(JSON.stringify(streamStart));
// send stream until done
const streamComplete = new Promise((resolve, reject) => {
(async () => {
for await (const message of textStream) {
const request = {
text: message,
try_trigger_generation: true,
};
socket.send(JSON.stringify(request));
}
})()
.then(resolve)
.catch(reject);
});
streamComplete
.then(() => {
const endStream = {
text: '',
};
socket.send(JSON.stringify(endStream));
})
.catch((e) => {
throw e;
});
};
return (async function* audioStream() {
let isDone = false;
let chunks: AudioChunk[] = [];
let resolve: (value: unknown) => void;
let waitForMessage = new Promise((r) => (resolve = r));
socket.onmessage = function (event) {
console.log(event);
const audioChunk = JSON.parse(event.data as string) as AudioChunk;
if (audioChunk.audio && audioChunk.alignment) {
chunks.push(audioChunk);
resolve(null);
waitForMessage = new Promise((r) => (resolve = r));
}
};
socket.onerror = function (error) {
throw error;
};
// Handle socket closing
socket.onclose = function () {
isDone = true;
};
while (!isDone) {
await waitForMessage;
yield* chunks;
chunks = [];
}
})();
}
import OpenAI from 'openai';
import { ChatCompletionStream } from 'openai/lib/ChatCompletionStream';
/**
 * Opens a streaming chat completion against OpenAI.
 *
 * Previously both destructured parameters were implicit `any` (rejected under
 * `strict`/`noImplicitAny`); they are now typed explicitly.
 *
 * @param systemPrompt - system instruction prepended to the conversation
 * @param messages - prior conversation turns in OpenAI chat format
 * @returns the live chat-completion stream for the request
 */
export async function streamCompletion({
  systemPrompt,
  messages,
}: {
  systemPrompt: string;
  messages: Array<{ role: 'user' | 'assistant'; content: string }>;
}) {
  // Constructor falls back to the OPENAI_API_KEY env var when apiKey is undefined.
  const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
  return client.beta.chat.completions.stream({
    model: 'gpt-4-0125-preview',
    messages: [{ role: 'system', content: systemPrompt }, ...messages],
  });
}
/**
 * Adapts an OpenAI chat-completion stream into a plain stream of text pieces,
 * yielding only the non-empty delta content of each chunk.
 *
 * @param llmStream - live completion stream from `streamCompletion`
 * @returns async iterable of the text fragments as they arrive
 */
export async function* llmMessageSource(llmStream: ChatCompletionStream): AsyncIterable<string> {
  for await (const part of llmStream) {
    const { content } = part.choices[0].delta;
    if (!content) {
      continue;
    }
    yield content;
  }
}
async function main(systemPrompt: string, prompt: string) {
const llmStream = await streamCompletion({
systemPrompt,
messages: [{ role: 'user', content: prompt }],
});
const llmMessageStream = llmMessageSource(llmStream);
console.log('Streaming LLM messages...');
for await (const audio of inputStreamTextToSpeech(llmMessageStream)) {
console.log(audio);
}
}
main('Hello, how can I help you today?', 'What is the meaning of life?');

1302
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -8,6 +8,11 @@ export namespace Agents {
export type ImageDetail = 'auto' | 'low' | 'high'; export type ImageDetail = 'auto' | 'low' | 'high';
export type ReasoningContentText = {
type: ContentTypes.THINK;
think: string;
};
export type MessageContentText = { export type MessageContentText = {
type: ContentTypes.TEXT; type: ContentTypes.TEXT;
text: string; text: string;
@ -20,6 +25,7 @@ export namespace Agents {
}; };
export type MessageContentComplex = export type MessageContentComplex =
| ReasoningContentText
| MessageContentText | MessageContentText
| MessageContentImageUrl | MessageContentImageUrl
// eslint-disable-next-line @typescript-eslint/no-explicit-any // eslint-disable-next-line @typescript-eslint/no-explicit-any
@ -212,12 +218,44 @@ export namespace Agents {
* The delta containing the fields that have changed on the Message. * The delta containing the fields that have changed on the Message.
*/ */
export interface MessageDelta { export interface MessageDelta {
/**
* The content of the message in array of text and/or images.
*/
content?: Agents.MessageContentComplex[];
}
/**
* Represents a reasoning delta i.e. any changed fields on a message during
* streaming.
*/
export interface ReasoningDeltaEvent {
/**
* The identifier of the message, which can be referenced in API endpoints.
*/
id: string;
/**
* The delta containing the fields that have changed.
*/
delta: ReasoningDelta;
}
/**
* The reasoning delta containing the fields that have changed on the Message.
*/
export interface ReasoningDelta {
/** /**
* The content of the message in array of text and/or images. * The content of the message in array of text and/or images.
*/ */
content?: MessageContentComplex[]; content?: MessageContentComplex[];
} }
export type ContentType = ContentTypes.TEXT | ContentTypes.IMAGE_URL | string;
export type ReasoningDeltaUpdate = { type: ContentTypes.THINK; think: string };
export type ContentType =
| ContentTypes.THINK
| ContentTypes.TEXT
| ContentTypes.IMAGE_URL
| string;
} }
export type ToolCallResult = { export type ToolCallResult = {

View file

@ -432,6 +432,7 @@ export type ContentPart = (
export type TMessageContentParts = export type TMessageContentParts =
| { type: ContentTypes.ERROR; text: Text & PartMetadata } | { type: ContentTypes.ERROR; text: Text & PartMetadata }
| { type: ContentTypes.THINK; think: string | (Text & PartMetadata) }
| { type: ContentTypes.TEXT; text: string | (Text & PartMetadata); tool_call_ids?: string[] } | { type: ContentTypes.TEXT; text: string | (Text & PartMetadata); tool_call_ids?: string[] }
| { | {
type: ContentTypes.TOOL_CALL; type: ContentTypes.TOOL_CALL;

View file

@ -1,5 +1,6 @@
export enum ContentTypes { export enum ContentTypes {
TEXT = 'text', TEXT = 'text',
THINK = 'think',
TEXT_DELTA = 'text_delta', TEXT_DELTA = 'text_delta',
TOOL_CALL = 'tool_call', TOOL_CALL = 'tool_call',
IMAGE_FILE = 'image_file', IMAGE_FILE = 'image_file',