mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-01-01 08:08:49 +01:00
* feat: update useTextToSpeech and useSpeechToText hooks to support external audio endpoints This commit updates the useTextToSpeech and useSpeechToText hooks in the Input directory to support external audio endpoints. It introduces the useGetExternalTextToSpeech and useGetExternalSpeechToText hooks, which determine whether the audio endpoints should be set to 'browser' or 'external' based on the value of the endpointTTS and endpointSTT Recoil states. The useTextToSpeech and useSpeechToText hooks now use these new hooks to determine whether to use external audio endpoints * feat: add userSelect style to ConversationModeSwitch label * fix: remove unused updateTokenWebsocket function and import The updateTokenWebsocket function and its import are no longer used in the OpenAIClient module. This commit removes the function and import to clean up the codebase * feat: support external audio endpoints in useTextToSpeech and useSpeechToText hooks This commit updates the useTextToSpeech and useSpeechToText hooks in the Input directory to support external audio endpoints. It introduces the useGetExternalTextToSpeech and useGetExternalSpeechToText hooks, which determine whether the audio endpoints should be set to 'browser' or 'external' based on the value of the endpointTTS and endpointSTT Recoil states. The useTextToSpeech and useSpeechToText hooks now use these new hooks to determine whether to use external audio endpoints * feat: update AutomaticPlayback component to AutomaticPlaybackSwitch; tests: added AutomaticPlaybackSwitch.spec > > This commit renames the AutomaticPlayback component to AutomaticPlaybackSwitch in the Speech directory. The new name better reflects the purpose of the component and aligns with the naming convention used in the codebase. * feat: update useSpeechToText hook to include interimTranscript This commit updates the useSpeechToText hook in the client/src/components/Chat/Input/AudioRecorder.tsx file to include the interimTranscript state. 
This allows for real-time display of the speech-to-text transcription while the user is still speaking. The interimTranscript is now used to update the text area value during recording. * feat: Add customConfigSpeech API endpoint for retrieving custom speech configuration This commit adds a new API endpoint in the file under the directory. This endpoint is responsible for retrieving the custom speech configuration using the function from the module * feat: update store var and ; fix: getCustomConfigSpeech * fix: client tests, removed unused import * feat: Update useCustomConfigSpeechQuery to return an array of custom speech configurations This commit modifies the useCustomConfigSpeechQuery function in the client/src/data-provider/queries.ts file to return an array of custom speech configurations instead of a single object. This change allows for better handling and manipulation of the data in the application * feat: Update useCustomConfigSpeechQuery to return an array of custom speech configurations * refactor: Update variable name in speechTab schema * refactor: removed unused and nested code * fix: using recoilState * refactor: Update Speech component to use useCallback for setting settings * fix: test * fix: tests * feature: ensure that the settings don't change after modifying then through the UI * remove comment * fix: Handle error gracefully in getCustomConfigSpeech and getVoices endpoints * fix: Handle error * fix: backend tests * fix: invalid custom config logging * chore: add back custom config info logging * chore: revert loadCustomConfig spec --------- Co-authored-by: Danny Avila <danny@librechat.ai>
220 lines
7.8 KiB
TypeScript
import { memo, useRef, useMemo } from 'react';
|
|
import { useRecoilState, useRecoilValue } from 'recoil';
|
|
import {
|
|
supportsFiles,
|
|
mergeFileConfig,
|
|
isAssistantsEndpoint,
|
|
fileConfig as defaultFileConfig,
|
|
} from 'librechat-data-provider';
|
|
import {
|
|
useChatContext,
|
|
useAddedChatContext,
|
|
useAssistantsMapContext,
|
|
useChatFormContext,
|
|
} from '~/Providers';
|
|
import {
|
|
useTextarea,
|
|
useAutoSave,
|
|
useRequiresKey,
|
|
useHandleKeyUp,
|
|
useSubmitMessage,
|
|
} from '~/hooks';
|
|
import { TextareaAutosize } from '~/components/ui';
|
|
import { useGetFileConfig } from '~/data-provider';
|
|
import { cn, removeFocusRings } from '~/utils';
|
|
import TextareaHeader from './TextareaHeader';
|
|
import PromptsCommand from './PromptsCommand';
|
|
import AttachFile from './Files/AttachFile';
|
|
import AudioRecorder from './AudioRecorder';
|
|
import { mainTextareaId } from '~/common';
|
|
import StreamAudio from './StreamAudio';
|
|
import StopButton from './StopButton';
|
|
import SendButton from './SendButton';
|
|
import FileRow from './Files/FileRow';
|
|
import Mention from './Mention';
|
|
import store from '~/store';
|
|
|
|
const ChatForm = ({ index = 0 }) => {
|
|
const submitButtonRef = useRef<HTMLButtonElement>(null);
|
|
const textAreaRef = useRef<HTMLTextAreaElement | null>(null);
|
|
|
|
const SpeechToText = useRecoilValue(store.speechToText);
|
|
const TextToSpeech = useRecoilValue(store.textToSpeech);
|
|
const automaticPlayback = useRecoilValue(store.automaticPlayback);
|
|
|
|
const [showStopButton, setShowStopButton] = useRecoilState(store.showStopButtonByIndex(index));
|
|
const [showPlusPopover, setShowPlusPopover] = useRecoilState(store.showPlusPopoverFamily(index));
|
|
const [showMentionPopover, setShowMentionPopover] = useRecoilState(
|
|
store.showMentionPopoverFamily(index),
|
|
);
|
|
|
|
const { requiresKey } = useRequiresKey();
|
|
const handleKeyUp = useHandleKeyUp({
|
|
index,
|
|
textAreaRef,
|
|
setShowPlusPopover,
|
|
setShowMentionPopover,
|
|
});
|
|
const { handlePaste, handleKeyDown, handleCompositionStart, handleCompositionEnd } = useTextarea({
|
|
textAreaRef,
|
|
submitButtonRef,
|
|
disabled: !!requiresKey,
|
|
});
|
|
|
|
const {
|
|
files,
|
|
setFiles,
|
|
conversation,
|
|
isSubmitting,
|
|
filesLoading,
|
|
setFilesLoading,
|
|
newConversation,
|
|
handleStopGenerating,
|
|
} = useChatContext();
|
|
const methods = useChatFormContext();
|
|
const {
|
|
addedIndex,
|
|
generateConversation,
|
|
conversation: addedConvo,
|
|
setConversation: setAddedConvo,
|
|
isSubmitting: isSubmittingAdded,
|
|
} = useAddedChatContext();
|
|
const showStopAdded = useRecoilValue(store.showStopButtonByIndex(addedIndex));
|
|
|
|
const { clearDraft } = useAutoSave({
|
|
conversationId: useMemo(() => conversation?.conversationId, [conversation]),
|
|
textAreaRef,
|
|
files,
|
|
setFiles,
|
|
});
|
|
|
|
const assistantMap = useAssistantsMapContext();
|
|
const { submitMessage, submitPrompt } = useSubmitMessage({ clearDraft });
|
|
|
|
const { endpoint: _endpoint, endpointType } = conversation ?? { endpoint: null };
|
|
const endpoint = endpointType ?? _endpoint;
|
|
|
|
const { data: fileConfig = defaultFileConfig } = useGetFileConfig({
|
|
select: (data) => mergeFileConfig(data),
|
|
});
|
|
|
|
const endpointFileConfig = fileConfig.endpoints[endpoint ?? ''];
|
|
const invalidAssistant = useMemo(
|
|
() =>
|
|
isAssistantsEndpoint(conversation?.endpoint) &&
|
|
(!conversation?.assistant_id ||
|
|
!assistantMap?.[conversation?.endpoint ?? '']?.[conversation?.assistant_id ?? '']),
|
|
[conversation?.assistant_id, conversation?.endpoint, assistantMap],
|
|
);
|
|
const disableInputs = useMemo(
|
|
() => !!(requiresKey || invalidAssistant),
|
|
[requiresKey, invalidAssistant],
|
|
);
|
|
|
|
const { ref, ...registerProps } = methods.register('text', {
|
|
required: true,
|
|
onChange: (e) => {
|
|
methods.setValue('text', e.target.value, { shouldValidate: true });
|
|
},
|
|
});
|
|
|
|
return (
|
|
<form
|
|
onSubmit={methods.handleSubmit((data) => submitMessage(data))}
|
|
className="stretch mx-2 flex flex-row gap-3 last:mb-2 md:mx-4 md:last:mb-6 lg:mx-auto lg:max-w-2xl xl:max-w-3xl"
|
|
>
|
|
<div className="relative flex h-full flex-1 items-stretch md:flex-col">
|
|
<div className="flex w-full items-center">
|
|
{showPlusPopover && !isAssistantsEndpoint(endpoint) && (
|
|
<Mention
|
|
setShowMentionPopover={setShowPlusPopover}
|
|
newConversation={generateConversation}
|
|
textAreaRef={textAreaRef}
|
|
commandChar="+"
|
|
placeholder="com_ui_add"
|
|
includeAssistants={false}
|
|
/>
|
|
)}
|
|
{showMentionPopover && (
|
|
<Mention
|
|
setShowMentionPopover={setShowMentionPopover}
|
|
newConversation={newConversation}
|
|
textAreaRef={textAreaRef}
|
|
/>
|
|
)}
|
|
<PromptsCommand index={index} textAreaRef={textAreaRef} submitPrompt={submitPrompt} />
|
|
<div className="bg-token-main-surface-primary relative flex w-full flex-grow flex-col overflow-hidden rounded-2xl border dark:border-gray-600 dark:text-white [&:has(textarea:focus)]:border-gray-300 [&:has(textarea:focus)]:shadow-[0_2px_6px_rgba(0,0,0,.05)] dark:[&:has(textarea:focus)]:border-gray-500">
|
|
<TextareaHeader addedConvo={addedConvo} setAddedConvo={setAddedConvo} />
|
|
<FileRow
|
|
files={files}
|
|
setFiles={setFiles}
|
|
setFilesLoading={setFilesLoading}
|
|
Wrapper={({ children }) => (
|
|
<div className="mx-2 mt-2 flex flex-wrap gap-2 px-2.5 md:pl-0 md:pr-4">
|
|
{children}
|
|
</div>
|
|
)}
|
|
/>
|
|
{endpoint && (
|
|
<TextareaAutosize
|
|
{...registerProps}
|
|
autoFocus
|
|
ref={(e) => {
|
|
ref(e);
|
|
textAreaRef.current = e;
|
|
}}
|
|
disabled={disableInputs}
|
|
onPaste={handlePaste}
|
|
onKeyDown={handleKeyDown}
|
|
onKeyUp={handleKeyUp}
|
|
onCompositionStart={handleCompositionStart}
|
|
onCompositionEnd={handleCompositionEnd}
|
|
id={mainTextareaId}
|
|
tabIndex={0}
|
|
data-testid="text-input"
|
|
style={{ height: 44, overflowY: 'auto' }}
|
|
rows={1}
|
|
className={cn(
|
|
supportsFiles[endpointType ?? endpoint ?? ''] && !endpointFileConfig?.disabled
|
|
? ' pl-10 md:pl-[55px]'
|
|
: 'pl-3 md:pl-4',
|
|
'm-0 w-full resize-none border-0 bg-transparent py-[10px] placeholder-black/50 focus:ring-0 focus-visible:ring-0 dark:bg-transparent dark:placeholder-white/50 md:py-3.5 ',
|
|
SpeechToText ? 'pr-20 md:pr-[85px]' : 'pr-10 md:pr-12',
|
|
'max-h-[65vh] md:max-h-[75vh]',
|
|
removeFocusRings,
|
|
)}
|
|
/>
|
|
)}
|
|
<AttachFile
|
|
endpoint={_endpoint ?? ''}
|
|
endpointType={endpointType}
|
|
disabled={disableInputs}
|
|
/>
|
|
{(isSubmitting || isSubmittingAdded) && (showStopButton || showStopAdded) ? (
|
|
<StopButton stop={handleStopGenerating} setShowStopButton={setShowStopButton} />
|
|
) : (
|
|
endpoint && (
|
|
<SendButton
|
|
ref={submitButtonRef}
|
|
control={methods.control}
|
|
disabled={!!(filesLoading || isSubmitting || disableInputs)}
|
|
/>
|
|
)
|
|
)}
|
|
{SpeechToText && (
|
|
<AudioRecorder
|
|
disabled={!!disableInputs}
|
|
textAreaRef={textAreaRef}
|
|
ask={submitMessage}
|
|
methods={methods}
|
|
/>
|
|
)}
|
|
{TextToSpeech && automaticPlayback && <StreamAudio index={index} />}
|
|
</div>
|
|
</div>
|
|
</div>
|
|
</form>
|
|
);
|
|
};
|
|
|
|
export default memo(ChatForm);
|