🏄‍♂️ refactor: Optimize Reasoning UI & Token Streaming (#5546)

* feat: Implement Show Thinking feature; refactor: testing thinking render optimizations

* feat: Refactor Thinking component styles and enhance Markdown rendering

* chore: add back removed code, revert type changes

* chore: Add back resetCounter effect to Markdown component for improved code block indexing

* chore: bump @librechat/agents and google langchain packages

* WIP: reasoning type updates

* WIP: first pass, reasoning content blocks

* chore: revert code

* chore: bump @librechat/agents

* refactor: optimize reasoning tag handling

* style: ul indent padding

* feat: add Reasoning component to handle reasoning display

* feat: first pass, content reasoning part styling

* refactor: add content placeholder for endpoints using new stream handler

* refactor: only cache messages when requesting stream audio

* fix: circular dep.

* fix: add default param

* refactor: tts, only request after message stream, fix chrome autoplay

* style: update label for submitting state and add localization for 'Thinking...'

* fix: improve global audio pause logic and reset active run ID

* fix: handle artifact edge cases

* fix: remove unnecessary console log from artifact update test

* feat: add support for continued message handling with new streaming method

---------

Co-authored-by: Marco Beretta <81851188+berry-13@users.noreply.github.com>
Danny Avila 2025-01-29 19:46:58 -05:00 committed by GitHub
parent d60a149ad9
commit 591a019766
48 changed files with 1791 additions and 726 deletions

@@ -4,9 +4,9 @@ import {
   Constants,
   QueryKeys,
   ContentTypes,
-  EModelEndpoint,
   parseCompactConvo,
   isAssistantsEndpoint,
+  EModelEndpoint,
 } from 'librechat-data-provider';
 import { useSetRecoilState, useResetRecoilState, useRecoilValue } from 'recoil';
 import type {
@@ -31,6 +31,15 @@ const logChatRequest = (request: Record<string, unknown>) => {
   logger.log('=====================================');
 };
 
+const usesContentStream = (endpoint: EModelEndpoint | undefined, endpointType?: string) => {
+  if (endpointType === EModelEndpoint.custom) {
+    return true;
+  }
+  if (endpoint === EModelEndpoint.openAI || endpoint === EModelEndpoint.azureOpenAI) {
+    return true;
+  }
+};
+
 export default function useChatFunctions({
   index = 0,
   files,
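
The helper added in this hunk is the gate for the content-stream path introduced further down. A minimal illustrative sketch of how it evaluates, with the helper body copied from the hunk above; the `EModelEndpoint` members are taken from `librechat-data-provider`, and `google` is used here only as an example of a non-matching endpoint. Note the fall-through case returns `undefined`, not `false`, which is still falsy where it is checked below:

```ts
import { EModelEndpoint } from 'librechat-data-provider';

// Copied from the hunk above: custom, OpenAI, and Azure OpenAI endpoints use the
// new content stream handler; anything else falls through and returns undefined.
const usesContentStream = (endpoint: EModelEndpoint | undefined, endpointType?: string) => {
  if (endpointType === EModelEndpoint.custom) {
    return true;
  }
  if (endpoint === EModelEndpoint.openAI || endpoint === EModelEndpoint.azureOpenAI) {
    return true;
  }
};

usesContentStream(EModelEndpoint.openAI); // true
usesContentStream(EModelEndpoint.google); // undefined (falsy, so the plain-text branch runs)
usesContentStream(undefined, EModelEndpoint.custom); // true
```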
@@ -219,8 +228,8 @@ export default function useChatFunctions({
       unfinished: false,
       isCreatedByUser: false,
       isEdited: isEditOrContinue,
-      iconURL: convo.iconURL,
-      model: convo.model,
+      iconURL: convo?.iconURL,
+      model: convo?.model,
       error: false,
     };
@@ -247,6 +256,17 @@
         },
       ];
       setShowStopButton(true);
+    } else if (usesContentStream(endpoint, endpointType)) {
+      initialResponse.text = '';
+      initialResponse.content = [
+        {
+          type: ContentTypes.TEXT,
+          [ContentTypes.TEXT]: {
+            value: responseText,
+          },
+        },
+      ];
+      setShowStopButton(true);
     } else {
       setShowStopButton(true);
     }
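
For endpoints that pass the `usesContentStream` check, the initial assistant message gets a single empty text content part instead of relying on plain `text`, giving the new stream handler a block to write into. A sketch of that placeholder's shape, using the property names from the hunk above; `responseText` and the surrounding `initialResponse` object are simplified stand-ins for the real values built inside `useChatFunctions`:

```ts
import { ContentTypes } from 'librechat-data-provider';

// Simplified stand-in for the initial assistant message created before streaming starts.
const responseText = ''; // partial text; empty for a fresh generation
const initialResponse = {
  text: '',
  content: [
    {
      type: ContentTypes.TEXT,
      [ContentTypes.TEXT]: { value: responseText },
    },
  ],
};

// The content stream writes into this text part as the response is generated.
console.log(initialResponse.content[0][ContentTypes.TEXT].value); // ''
```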