🔊 fix(tts): NotAllowedError (mobile/safari), Unsupported MediaSource type (firefox), Hide Audio Element (#2854)

* fix: hide audio element on mobile

* chore: add tts docs link

* fix: select voice option on first render

* fix: NotAllowedError, prevent async playback for mobile triggers, consolidate MessageAudio code, use user-triggered unmutes

* fix: Firefox/unsupported type for MediaSource hack

* refactor(STT): make icon red when recording. consolidate logic to AudioRecorder component

* fix: revert Redis changes to use separate client for sessions
Danny Avila 2024-05-24 12:18:11 -04:00 committed by GitHub
parent dcd2e3e62d
commit 35ba4ba1a4
14 changed files with 421 additions and 130 deletions

api/cache/redis.js

@@ -1,9 +1,4 @@
 const Redis = require('ioredis');
-const { logger } = require('~/config');
 const { REDIS_URI } = process.env ?? {};
-const redis = new Redis.Cluster(REDIS_URI);
-redis
-  .on('error', (err) => logger.error('ioredis error:', err))
-  .on('ready', () => logger.info('ioredis successfully initialized.'))
-  .on('reconnecting', () => logger.info('ioredis reconnecting...'));
+const redis = new Redis(REDIS_URI);
 module.exports = redis;


@@ -16,7 +16,8 @@ const { logger } = require('~/config');
 function getProvider(ttsSchema) {
   if (!ttsSchema) {
     throw new Error(`No TTS schema is set. Did you configure TTS in the custom config (librechat.yaml)?
-# Example TTS configuration`);
+
+https://www.librechat.ai/docs/configuration/stt_tts#tts`);
   }

   const providers = Object.entries(ttsSchema).filter(([, value]) => Object.keys(value).length > 0);
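For context, the error above now points at the docs instead of inlining an example. A minimal sketch of the librechat.yaml block the link describes — the exact key names (apiKey, model, voices) are taken from the docs of this era and should be treated as illustrative, not authoritative:

# librechat.yaml — hedged sketch of a TTS provider block
tts:
  openai:
    apiKey: '${TTS_API_KEY}'
    model: 'tts-1'
    voices: ['alloy', 'echo', 'fable', 'onyx', 'nova', 'shimmer']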


@@ -1,14 +1,15 @@
+const Redis = require('ioredis');
+const passport = require('passport');
 const session = require('express-session');
 const RedisStore = require('connect-redis').default;
-const passport = require('passport');
 const {
+  setupOpenId,
   googleLogin,
   githubLogin,
   discordLogin,
   facebookLogin,
-  setupOpenId,
-} = require('../strategies');
-const client = require('../cache/redis');
+} = require('~/strategies');
 const { logger } = require('~/config');
+
 /**
  *
@@ -40,6 +41,11 @@ const configureSocialLogins = (app) => {
     saveUninitialized: false,
   };
   if (process.env.USE_REDIS) {
+    const client = new Redis(process.env.REDIS_URI);
+    client
+      .on('error', (err) => logger.error('ioredis error:', err))
+      .on('ready', () => logger.info('ioredis successfully initialized.'))
+      .on('reconnecting', () => logger.info('ioredis reconnecting...'));
     sessionOptions.store = new RedisStore({ client, prefix: 'librechat' });
   }
   app.use(session(sessionOptions));


@@ -1,16 +1,46 @@
-import React from 'react';
-import { ListeningIcon, Spinner, SpeechIcon } from '~/components/svg';
+import { useEffect } from 'react';
+import type { UseFormReturn } from 'react-hook-form';
 import { TooltipProvider, Tooltip, TooltipTrigger, TooltipContent } from '~/components/ui/';
-import { useLocalize } from '~/hooks';
+import { ListeningIcon, Spinner } from '~/components/svg';
+import { useLocalize, useSpeechToText } from '~/hooks';
+import { globalAudioId } from '~/common';

 export default function AudioRecorder({
-  isListening,
-  isLoading,
-  startRecording,
-  stopRecording,
+  textAreaRef,
+  methods,
+  ask,
   disabled,
 }: {
+  textAreaRef: React.RefObject<HTMLTextAreaElement>;
+  methods: UseFormReturn<{ text: string }>;
+  ask: (data: { text: string }) => void;
   disabled: boolean;
 }) {
   const localize = useLocalize();

+  const handleTranscriptionComplete = (text: string) => {
+    if (text) {
+      const globalAudio = document.getElementById(globalAudioId) as HTMLAudioElement;
+      if (globalAudio) {
+        console.log('Unmuting global audio');
+        globalAudio.muted = false;
+      }
+      ask({ text });
+      methods.reset({ text: '' });
+      clearText();
+    }
+  };
+
+  const { isListening, isLoading, startRecording, stopRecording, speechText, clearText } =
+    useSpeechToText(handleTranscriptionComplete);
+
+  useEffect(() => {
+    if (textAreaRef.current) {
+      textAreaRef.current.value = speechText;
+      methods.setValue('text', speechText, { shouldValidate: true });
+    }
+  }, [speechText, methods, textAreaRef]);
+
   const handleStartRecording = async () => {
     await startRecording();
   };
@@ -19,6 +49,16 @@ export default function AudioRecorder({
     await stopRecording();
   };

+  const renderIcon = () => {
+    if (isListening) {
+      return <ListeningIcon className="stroke-red-500" />;
+    }
+    if (isLoading) {
+      return <Spinner className="stroke-gray-700 dark:stroke-gray-300" />;
+    }
+    return <ListeningIcon className="stroke-gray-700 dark:stroke-gray-300" />;
+  };
+
   return (
     <TooltipProvider delayDuration={250}>
       <Tooltip>
@@ -29,13 +69,7 @@ export default function AudioRecorder({
             className="absolute bottom-1.5 right-12 flex h-[30px] w-[30px] items-center justify-center rounded-lg p-0.5 transition-colors hover:bg-gray-200 dark:hover:bg-gray-700 md:bottom-3 md:right-12"
             type="button"
           >
-            {isListening ? (
-              <SpeechIcon className="stroke-gray-700 dark:stroke-gray-300" />
-            ) : isLoading ? (
-              <Spinner className="stroke-gray-700 dark:stroke-gray-300" />
-            ) : (
-              <ListeningIcon className="stroke-gray-700 dark:stroke-gray-300" />
-            )}
+            {renderIcon()}
           </button>
         </TooltipTrigger>
         <TooltipContent side="top" sideOffset={10}>


@@ -1,6 +1,6 @@
 import { useForm } from 'react-hook-form';
 import { useRecoilState, useRecoilValue } from 'recoil';
-import { memo, useCallback, useRef, useMemo, useEffect } from 'react';
+import { memo, useCallback, useRef, useMemo } from 'react';
 import {
   supportsFiles,
   mergeFileConfig,
@@ -8,7 +8,7 @@ import {
   fileConfig as defaultFileConfig,
 } from 'librechat-data-provider';
 import { useChatContext, useAssistantsMapContext } from '~/Providers';
-import { useRequiresKey, useTextarea, useSpeechToText } from '~/hooks';
+import { useRequiresKey, useTextarea } from '~/hooks';
 import { TextareaAutosize } from '~/components/ui';
 import { useGetFileConfig } from '~/data-provider';
 import { cn, removeFocusOutlines } from '~/utils';
@@ -72,24 +72,6 @@ const ChatForm = ({ index = 0 }) => {
   const { endpoint: _endpoint, endpointType } = conversation ?? { endpoint: null };
   const endpoint = endpointType ?? _endpoint;

-  const handleTranscriptionComplete = (text: string) => {
-    if (text) {
-      ask({ text });
-      methods.reset({ text: '' });
-      clearText();
-    }
-  };
-
-  const { isListening, isLoading, startRecording, stopRecording, speechText, clearText } =
-    useSpeechToText(handleTranscriptionComplete);
-
-  useEffect(() => {
-    if (textAreaRef.current) {
-      textAreaRef.current.value = speechText;
-      methods.setValue('text', speechText, { shouldValidate: true });
-    }
-  }, [speechText, methods]);
-
   const { data: fileConfig = defaultFileConfig } = useGetFileConfig({
     select: (data) => mergeFileConfig(data),
   });
@@ -183,11 +165,10 @@ const ChatForm = ({ index = 0 }) => {
           )}
           {SpeechToText && (
             <AudioRecorder
-              isListening={isListening}
-              isLoading={isLoading}
-              startRecording={startRecording}
-              stopRecording={stopRecording}
               disabled={!!disableInputs}
+              textAreaRef={textAreaRef}
+              ask={submitMessage}
+              methods={methods}
             />
           )}
           {TextToSpeech && automaticPlayback && <StreamAudio index={index} />}


@@ -88,7 +88,7 @@ export default function StreamAudio({ index = 0 }) {
         return;
       }

-      console.log('Fetching audio...');
+      console.log('Fetching audio...', navigator.userAgent);
       const response = await fetch('/api/files/tts', {
         method: 'POST',
         headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
@@ -103,8 +103,14 @@ export default function StreamAudio({ index = 0 }) {
       }

       const reader = response.body.getReader();
-      const mediaSource = new MediaSourceAppender('audio/mpeg');
-      setGlobalAudioURL(mediaSource.mediaSourceUrl);
+      const type = 'audio/mpeg';
+      const browserSupportsType = MediaSource.isTypeSupported(type);
+
+      let mediaSource: MediaSourceAppender | undefined;
+      if (browserSupportsType) {
+        mediaSource = new MediaSourceAppender(type);
+        setGlobalAudioURL(mediaSource.mediaSourceUrl);
+      }
+
       setAudioRunId(activeRunId);

       let done = false;
@@ -120,7 +126,7 @@ export default function StreamAudio({ index = 0 }) {
         if (cacheTTS && value) {
           chunks.push(value);
         }
-        if (value) {
+        if (value && mediaSource) {
           mediaSource.addData(value);
         }
         done = readerDone;
@@ -136,8 +142,19 @@ export default function StreamAudio({ index = 0 }) {
         if (!cacheKey) {
           throw new Error('Cache key not found');
         }
-        const audioBlob = new Blob(chunks, { type: 'audio/mpeg' });
-        cache.put(cacheKey, new Response(audioBlob));
+        const audioBlob = new Blob(chunks, { type });
+        const cachedResponse = new Response(audioBlob);
+        await cache.put(cacheKey, cachedResponse);
+
+        if (!browserSupportsType) {
+          const unconsumedResponse = await cache.match(cacheKey);
+          if (!unconsumedResponse) {
+            throw new Error('Failed to fetch audio from cache');
+          }
+          const audioBlob = await unconsumedResponse.blob();
+          const blobUrl = URL.createObjectURL(audioBlob);
+          setGlobalAudioURL(blobUrl);
+        }
+
         setIsFetching(false);
       }
       console.log('Audio stream reading ended');
@@ -194,9 +211,16 @@ export default function StreamAudio({ index = 0 }) {
       ref={audioRef}
       controls
       controlsList="nodownload nofullscreen noremoteplayback"
-      className="absolute h-0 w-0 overflow-hidden"
+      style={{
+        position: 'absolute',
+        overflow: 'hidden',
+        display: 'none',
+        height: '0px',
+        width: '0px',
+      }}
       src={globalAudioURL || undefined}
       id={globalAudioId}
       muted
       autoPlay
     />
   );
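The gist of the Firefox fix above, as a standalone sketch: MediaSourceAppender wraps the MediaSource API internally, so the sketch uses the raw API directly; the playStream helper is illustrative only, not part of the codebase.

// Illustrative sketch: stream through MediaSource when the browser supports
// the container type; otherwise buffer all chunks and hand the element a
// Blob URL once the stream ends (the Firefox path).
const MIME_TYPE = 'audio/mpeg';

async function playStream(audio: HTMLAudioElement, stream: ReadableStream<Uint8Array>) {
  const reader = stream.getReader();

  if ('MediaSource' in window && MediaSource.isTypeSupported(MIME_TYPE)) {
    const mediaSource = new MediaSource();
    audio.src = URL.createObjectURL(mediaSource);
    await new Promise((resolve) =>
      mediaSource.addEventListener('sourceopen', resolve, { once: true }),
    );
    const sourceBuffer = mediaSource.addSourceBuffer(MIME_TYPE);
    for (;;) {
      const { value, done } = await reader.read();
      if (done) {
        break;
      }
      sourceBuffer.appendBuffer(value);
      // appendBuffer is asynchronous; wait for the buffer to drain before appending more
      await new Promise((resolve) =>
        sourceBuffer.addEventListener('updateend', resolve, { once: true }),
      );
    }
    mediaSource.endOfStream();
  } else {
    // No incremental playback: collect the full response, then play a Blob URL
    const chunks: Uint8Array[] = [];
    for (;;) {
      const { value, done } = await reader.read();
      if (done) {
        break;
      }
      chunks.push(value);
    }
    audio.src = URL.createObjectURL(new Blob(chunks, { type: MIME_TYPE }));
  }
}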


@@ -1,18 +1,10 @@
 import React, { useState } from 'react';
 import { useRecoilState } from 'recoil';
 import type { TConversation, TMessage } from 'librechat-data-provider';
-import {
-  Clipboard,
-  CheckMark,
-  EditIcon,
-  RegenerateIcon,
-  ContinueIcon,
-  VolumeIcon,
-  VolumeMuteIcon,
-  Spinner,
-} from '~/components/svg';
-import { useGenerationsByLatest, useLocalize, useTextToSpeech } from '~/hooks';
+import { EditIcon, Clipboard, CheckMark, ContinueIcon, RegenerateIcon } from '~/components/svg';
+import { useGenerationsByLatest, useLocalize } from '~/hooks';
 import { Fork } from '~/components/Conversations';
+import MessageAudio from './MessageAudio';
 import { cn } from '~/utils';
 import store from '~/store';
@@ -49,12 +41,6 @@ export default function HoverButtons({
   const [isCopied, setIsCopied] = useState(false);
   const [TextToSpeech] = useRecoilState<boolean>(store.TextToSpeech);

-  const { handleMouseDown, handleMouseUp, toggleSpeech, isSpeaking, isLoading } = useTextToSpeech(
-    message?.content ?? message?.text ?? '',
-    isLast,
-    index,
-  );
-
   const {
     hideEditButton,
     regenerateEnabled,
@@ -81,32 +67,9 @@ export default function HoverButtons({
     enterEdit();
   };

-  const renderIcon = (size: string) => {
-    if (isLoading) {
-      return <Spinner size={size} />;
-    }
-    if (isSpeaking) {
-      return <VolumeMuteIcon size={size} />;
-    }
-    return <VolumeIcon size={size} />;
-  };
-
   return (
     <div className="visible mt-0 flex justify-center gap-1 self-end text-gray-400 lg:justify-start">
-      {TextToSpeech && (
-        <button
-          className="hover-button rounded-md p-1 pl-0 text-gray-400 hover:text-gray-950 dark:text-gray-400/70 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible"
-          onMouseDown={handleMouseDown}
-          onMouseUp={handleMouseUp}
-          onClick={toggleSpeech}
-          type="button"
-          title={isSpeaking ? localize('com_ui_stop') : localize('com_ui_read_aloud')}
-        >
-          {renderIcon('19')}
-        </button>
-      )}
+      {TextToSpeech && <MessageAudio index={index} message={message} isLast={isLast} />}
       {isEditableEndpoint && (
         <button
           className={cn(


@@ -0,0 +1,89 @@
+import { useEffect } from 'react';
+import { useRecoilValue } from 'recoil';
+import type { TMessage } from 'librechat-data-provider';
+import { VolumeIcon, VolumeMuteIcon, Spinner } from '~/components/svg';
+import { useLocalize, useTextToSpeech } from '~/hooks';
+import store from '~/store';
+
+type THoverButtons = {
+  message: TMessage;
+  isLast: boolean;
+  index: number;
+};
+
+export default function MessageAudio({ index, message, isLast }: THoverButtons) {
+  const localize = useLocalize();
+  const playbackRate = useRecoilValue(store.playbackRate);
+
+  const { toggleSpeech, isSpeaking, isLoading, audioRef } = useTextToSpeech(message, isLast, index);
+
+  const renderIcon = (size: string) => {
+    if (isLoading) {
+      return <Spinner size={size} />;
+    }
+    if (isSpeaking) {
+      return <VolumeMuteIcon size={size} />;
+    }
+    return <VolumeIcon size={size} />;
+  };
+
+  useEffect(() => {
+    const messageAudio = document.getElementById(
+      `audio-${message.messageId}`,
+    ) as HTMLAudioElement | null;
+    if (!messageAudio) {
+      return;
+    }
+    if (playbackRate && messageAudio && messageAudio.playbackRate !== playbackRate) {
+      messageAudio.playbackRate = playbackRate;
+    }
+  }, [audioRef, isSpeaking, playbackRate, message.messageId]);
+
+  return (
+    <>
+      <button
+        className="hover-button rounded-md p-1 pl-0 text-gray-400 hover:text-gray-950 dark:text-gray-400/70 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible"
+        // onMouseDownCapture={() => {
+        //   if (audioRef.current) {
+        //     audioRef.current.muted = false;
+        //   }
+        //   handleMouseDown();
+        // }}
+        // onMouseUpCapture={() => {
+        //   if (audioRef.current) {
+        //     audioRef.current.muted = false;
+        //   }
+        //   handleMouseUp();
+        // }}
+        onClickCapture={() => {
+          if (audioRef.current) {
+            audioRef.current.muted = false;
+          }
+          toggleSpeech();
+        }}
+        type="button"
+        title={isSpeaking ? localize('com_ui_stop') : localize('com_ui_read_aloud')}
+      >
+        {renderIcon('19')}
+      </button>
+      <audio
+        ref={audioRef}
+        controls
+        controlsList="nodownload nofullscreen noremoteplayback"
+        style={{
+          position: 'absolute',
+          overflow: 'hidden',
+          display: 'none',
+          height: '0px',
+          width: '0px',
+        }}
+        src={audioRef.current?.src || undefined}
+        id={`audio-${message.messageId}`}
+        muted
+        autoPlay
+      />
+    </>
+  );
+}
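The pattern at work here (and in StreamAudio above): the element is rendered muted with autoPlay, since muted autoplay is permitted by mobile/Safari autoplay policy, and it is only unmuted synchronously inside a user gesture, which is what avoids NotAllowedError. A minimal standalone sketch; the helper name is illustrative:

// Minimal sketch: keep the element muted so autoplay is allowed, then
// unmute synchronously inside a real user gesture.
function attachGestureUnmute(audio: HTMLAudioElement, trigger: HTMLElement) {
  audio.muted = true; // muted autoplay passes the autoplay policy

  trigger.addEventListener('click', () => {
    audio.muted = false; // runs inside the gesture, so it is allowed
    audio.play().catch((err) => {
      // a play() attempted outside a gesture is where NotAllowedError surfaces
      console.warn('Playback blocked:', err);
    });
  });
}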


@@ -1,15 +1,21 @@
-import { useMemo } from 'react';
 import { useRecoilState } from 'recoil';
+import { useMemo, useEffect } from 'react';
+import Dropdown from '~/components/ui/DropdownNoState';
 import { useVoicesQuery } from '~/data-provider';
-import { Dropdown } from '~/components/ui';
 import { useLocalize } from '~/hooks';
 import store from '~/store';

 export default function VoiceDropdown() {
   const localize = useLocalize();
-  const [voice, setVoice] = useRecoilState<string>(store.voice);
+  const [voice, setVoice] = useRecoilState(store.voice);
   const { data } = useVoicesQuery();

+  useEffect(() => {
+    if (!voice && data?.length) {
+      setVoice(data[0]);
+    }
+  }, [voice, data, setVoice]);
+
   const voiceOptions = useMemo(
     () => (data ?? []).map((v: string) => ({ value: v, display: v })),
     [data],


@@ -0,0 +1,105 @@
+import React, { FC } from 'react';
+import { Listbox } from '@headlessui/react';
+import { cn } from '~/utils/';
+
+type OptionType = {
+  value: string;
+  display?: string;
+};
+
+type DropdownPosition = 'left' | 'right';
+
+interface DropdownProps {
+  value: string;
+  label?: string;
+  onChange: (value: string) => void;
+  options: (string | OptionType)[];
+  className?: string;
+  position?: DropdownPosition;
+  width?: number;
+  maxHeight?: string;
+  testId?: string;
+}
+
+const Dropdown: FC<DropdownProps> = ({
+  value,
+  label = '',
+  onChange,
+  options,
+  className = '',
+  position = 'right',
+  width,
+  maxHeight = 'auto',
+  testId = 'dropdown-menu',
+}) => {
+  const positionClasses = {
+    right: 'origin-bottom-left left-0',
+    left: 'origin-bottom-right right-0',
+  };
+
+  return (
+    <div className={cn('relative', className)}>
+      <Listbox
+        value={value}
+        onChange={(newValue) => {
+          onChange(newValue);
+        }}
+      >
+        <div className={cn('relative', className)}>
+          <Listbox.Button
+            data-testid={testId}
+            className={cn(
+              'relative inline-flex items-center justify-between rounded-md border-gray-300 bg-white py-2 pl-3 pr-8 text-gray-700 hover:bg-gray-50 dark:border-gray-600 dark:bg-gray-700 dark:text-white dark:hover:bg-gray-600',
+              'w-auto',
+              className,
+            )}
+          >
+            <span className="block truncate">
+              {label}
+              {options
+                .map((o) => (typeof o === 'string' ? { value: o, display: o } : o))
+                .find((o) => o.value === value)?.display || value}
+            </span>
+            <span className="pointer-events-none absolute inset-y-0 right-0 flex items-center pr-2">
+              <svg
+                xmlns="http://www.w3.org/2000/svg"
+                fill="none"
+                viewBox="0 0 24 24"
+                strokeWidth="2"
+                stroke="currentColor"
+                className="h-4 w-5 rotate-0 transform text-gray-400 transition-transform duration-300 ease-in-out"
+              >
+                <polyline points="6 9 12 15 18 9"></polyline>
+              </svg>
+            </span>
+          </Listbox.Button>
+          <Listbox.Options
+            className={cn(
+              `absolute z-50 mt-1 flex max-h-[40vh] flex-col items-start gap-1 overflow-auto rounded-lg border border-gray-300 bg-white p-1.5 text-gray-700 shadow-lg transition-opacity focus:outline-none dark:border-gray-600 dark:bg-gray-700 dark:text-white ${positionClasses[position]}`,
+              className,
+            )}
+            style={{ width: width ? `${width}px` : 'auto', maxHeight: maxHeight }}
+          >
+            {options.map((item, index) => (
+              <Listbox.Option
+                key={index}
+                value={typeof item === 'string' ? item : item.value}
+                className={cn(
+                  'relative cursor-pointer select-none rounded border-gray-300 bg-white py-2.5 pl-3 pr-6 text-gray-700 hover:bg-gray-100 dark:border-gray-300 dark:bg-gray-700 dark:text-white dark:hover:bg-gray-600',
+                )}
+                style={{ width: '100%' }}
+                data-theme={typeof item === 'string' ? item : (item as OptionType).value}
+              >
+                <span className="block truncate">
+                  {typeof item === 'string' ? item : (item as OptionType).display}
+                </span>
+              </Listbox.Option>
+            ))}
+          </Listbox.Options>
+        </div>
+      </Listbox>
+    </div>
+  );
+};
+
+export default Dropdown;
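A hypothetical usage of this controlled component ("NoState" because the selection lives entirely in the parent), mirroring how VoiceDropdown drives it from Recoil state:

import { useState } from 'react';
import Dropdown from '~/components/ui/DropdownNoState';

function VoicePickerExample() {
  // the parent owns the value; the component only reports changes
  const [voice, setVoice] = useState('alloy');
  return (
    <Dropdown
      value={voice}
      onChange={setVoice}
      options={[
        { value: 'alloy', display: 'Alloy' },
        { value: 'echo', display: 'Echo' },
      ]}
      testId="voice-dropdown"
    />
  );
}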


@@ -0,0 +1,48 @@
+import { useEffect, useRef } from 'react';
+
+export default function useCustomAudioRef({
+  setIsPlaying,
+}: {
+  setIsPlaying: (isPlaying: boolean) => void;
+}) {
+  const audioRef = useRef<HTMLAudioElement | null>(null);
+
+  useEffect(() => {
+    const handleEnded = () => {
+      setIsPlaying(false);
+      console.log('message audio ended');
+      if (audioRef.current) {
+        URL.revokeObjectURL(audioRef.current.src);
+      }
+    };
+
+    const handleStart = () => {
+      setIsPlaying(true);
+      console.log('message audio started');
+    };
+
+    const handlePause = () => {
+      setIsPlaying(false);
+      console.log('message audio paused');
+    };
+
+    const audioElement = audioRef.current;
+
+    if (audioRef.current) {
+      audioRef.current.muted = true;
+      audioRef.current.addEventListener('ended', handleEnded);
+      audioRef.current.addEventListener('play', handleStart);
+      audioRef.current.addEventListener('pause', handlePause);
+    }
+
+    return () => {
+      if (audioElement) {
+        audioElement.removeEventListener('ended', handleEnded);
+        audioElement.removeEventListener('play', handleStart);
+        audioElement.removeEventListener('pause', handlePause);
+        URL.revokeObjectURL(audioElement.src);
+      }
+    };
+  }, [setIsPlaying]);
+
+  return { audioRef };
+}
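A hypothetical consumer of this hook (imported as useAudioRef elsewhere in this commit): it hands back a ref that starts muted on mount and mirrors play/pause/ended into React state. The component and props below are illustrative only:

import { useState } from 'react';
import useAudioRef from '~/hooks/Audio/useAudioRef';

function SpeechIndicator({ src }: { src: string }) {
  const [isPlaying, setIsPlaying] = useState(false);
  const { audioRef } = useAudioRef({ setIsPlaying });

  return (
    <>
      <span>{isPlaying ? 'Speaking…' : 'Idle'}</span>
      {/* starts muted via the hook; a user gesture elsewhere unmutes it */}
      <audio ref={audioRef} src={src} autoPlay />
    </>
  );
}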


@@ -1,13 +1,13 @@
 import { useRef } from 'react';
 import { parseTextParts } from 'librechat-data-provider';
-import type { TMessageContentParts } from 'librechat-data-provider';
+import type { TMessage } from 'librechat-data-provider';
 import useTextToSpeechExternal from './useTextToSpeechExternal';
 import useTextToSpeechBrowser from './useTextToSpeechBrowser';
 import { usePauseGlobalAudio } from '../Audio';
 import { useRecoilState } from 'recoil';
 import store from '~/store';

-const useTextToSpeech = (message: string | TMessageContentParts[], isLast: boolean, index = 0) => {
+const useTextToSpeech = (message: TMessage, isLast: boolean, index = 0) => {
   const [endpointTTS] = useRecoilState<string>(store.endpointTTS);
   const useExternalTextToSpeech = endpointTTS === 'external';
@@ -22,7 +22,8 @@ const useTextToSpeech = (message: string | TMessageContentParts[], isLast: boolean, index = 0) => {
     cancelSpeech: cancelSpeechExternal,
     isSpeaking: isSpeakingExternal,
     isLoading: isLoading,
-  } = useTextToSpeechExternal(isLast, index);
+    audioRef,
+  } = useTextToSpeechExternal(message.messageId, isLast, index);
   const { pauseGlobalAudio } = usePauseGlobalAudio(index);

   const generateSpeech = useExternalTextToSpeech ? generateSpeechExternal : generateSpeechLocal;
@@ -36,8 +37,10 @@ const useTextToSpeech = (message: string | TMessageContentParts[], isLast: boolean, index = 0) => {
     isMouseDownRef.current = true;
     timerRef.current = window.setTimeout(() => {
       if (isMouseDownRef.current) {
-        const parsedMessage = typeof message === 'string' ? message : parseTextParts(message);
-        generateSpeech(parsedMessage, true);
+        const messageContent = message?.content ?? message?.text ?? '';
+        const parsedMessage =
+          typeof messageContent === 'string' ? messageContent : parseTextParts(messageContent);
+        generateSpeech(parsedMessage, false);
       }
     }, 1000);
   };
@@ -51,10 +54,13 @@ const useTextToSpeech = (message: string | TMessageContentParts[], isLast: boolean, index = 0) => {
   const toggleSpeech = () => {
     if (isSpeaking) {
+      console.log('canceling message audio speech');
       cancelSpeech();
       pauseGlobalAudio();
     } else {
-      const parsedMessage = typeof message === 'string' ? message : parseTextParts(message);
+      const messageContent = message?.content ?? message?.text ?? '';
+      const parsedMessage =
+        typeof messageContent === 'string' ? messageContent : parseTextParts(messageContent);
       generateSpeech(parsedMessage, false);
     }
   };
@@ -65,6 +71,7 @@ const useTextToSpeech = (message: string | TMessageContentParts[], isLast: boolean, index = 0) => {
     toggleSpeech,
     isSpeaking,
     isLoading,
+    audioRef,
   };
 };


@@ -1,6 +1,7 @@
 import { useRecoilValue } from 'recoil';
-import { useCallback, useEffect, useState, useMemo, useRef } from 'react';
+import { useState, useMemo, useRef, useCallback, useEffect } from 'react';
 import { useTextToSpeechMutation } from '~/data-provider';
+import useAudioRef from '~/hooks/Audio/useAudioRef';
 import useLocalize from '~/hooks/useLocalize';
 import { useToastContext } from '~/Providers';
 import store from '~/store';
@@ -12,23 +13,28 @@ const createFormData = (text: string, voice: string) => {
   return formData;
 };

-function useTextToSpeechExternal(isLast: boolean, index = 0) {
+function useTextToSpeechExternal(messageId: string, isLast: boolean, index = 0) {
   const localize = useLocalize();
   const { showToast } = useToastContext();
   const voice = useRecoilValue(store.voice);
   const cacheTTS = useRecoilValue(store.cacheTTS);
   const playbackRate = useRecoilValue(store.playbackRate);

-  const audioRef = useRef<HTMLAudioElement | null>(null);
   const [downloadFile, setDownloadFile] = useState(false);
   const [isLocalSpeaking, setIsSpeaking] = useState(false);
+  const { audioRef } = useAudioRef({ setIsPlaying: setIsSpeaking });
+  const promiseAudioRef = useRef<HTMLAudioElement | null>(null);

   /* Global Audio Variables */
   const globalIsFetching = useRecoilValue(store.globalAudioFetchingFamily(index));
   const globalIsPlaying = useRecoilValue(store.globalAudioPlayingFamily(index));

-  const playAudio = (blobUrl: string) => {
+  const autoPlayAudio = (blobUrl: string) => {
+    const newAudio = new Audio(blobUrl);
+    audioRef.current = newAudio;
+  };
+
+  const playAudioPromise = (blobUrl: string) => {
     const newAudio = new Audio(blobUrl);
     const initializeAudio = () => {
       if (playbackRate && playbackRate !== 1) {
@@ -53,12 +59,12 @@ function useTextToSpeechExternal(isLast: boolean, index = 0) {
       });

       newAudio.onended = () => {
-        console.log('Target message audio ended');
+        console.log('Cached message audio ended');
         URL.revokeObjectURL(blobUrl);
         setIsSpeaking(false);
       };

-      audioRef.current = newAudio;
+      promiseAudioRef.current = newAudio;
     };
@@ -95,7 +101,7 @@ function useTextToSpeechExternal(isLast: boolean, index = 0) {
       if (downloadFile) {
         downloadAudio(blobUrl);
       }
-      playAudio(blobUrl);
+      autoPlayAudio(blobUrl);
     } catch (error) {
       showToast({
         message: `Error processing audio: ${(error as Error).message}`,
@@ -111,38 +117,58 @@ function useTextToSpeechExternal(isLast: boolean, index = 0) {
     },
   });

-  const generateSpeechExternal = async (text: string, download: boolean) => {
-    const cachedResponse = await caches.match(text);
+  const startMutation = (text: string, download: boolean) => {
+    const formData = createFormData(text, voice);
+    setDownloadFile(download);
+    processAudio(formData);
+  };

-    if (cachedResponse && cacheTTS) {
-      handleCachedResponse(cachedResponse, download);
+  const generateSpeechExternal = (text: string, download: boolean) => {
+    if (cacheTTS) {
+      handleCachedResponse(text, download);
     } else {
-      const formData = createFormData(text, voice);
-      setDownloadFile(download);
-      processAudio(formData);
+      startMutation(text, download);
     }
   };

-  const handleCachedResponse = async (cachedResponse: Response, download: boolean) => {
+  const handleCachedResponse = async (text: string, download: boolean) => {
+    const cachedResponse = await caches.match(text);
+    if (!cachedResponse) {
+      return startMutation(text, download);
+    }
     const audioBlob = await cachedResponse.blob();
     const blobUrl = URL.createObjectURL(audioBlob);
     if (download) {
       downloadAudio(blobUrl);
     } else {
-      playAudio(blobUrl);
+      playAudioPromise(blobUrl);
     }
   };

-  const cancelSpeech = useCallback(() => {
-    if (audioRef.current) {
-      audioRef.current.pause();
-      audioRef.current.src && URL.revokeObjectURL(audioRef.current.src);
-      audioRef.current = null;
+  const cancelSpeech = () => {
+    const messageAudio = document.getElementById(`audio-${messageId}`) as HTMLAudioElement | null;
+    const pauseAudio = (currentElement: HTMLAudioElement | null) => {
+      if (currentElement) {
+        currentElement.pause();
+        currentElement.src && URL.revokeObjectURL(currentElement.src);
+        audioRef.current = null;
+      }
+    };
+    pauseAudio(messageAudio);
+    pauseAudio(promiseAudioRef.current);
+    setIsSpeaking(false);
+  };
+
+  const cancelPromiseSpeech = useCallback(() => {
+    if (promiseAudioRef.current) {
+      promiseAudioRef.current.pause();
+      promiseAudioRef.current.src && URL.revokeObjectURL(promiseAudioRef.current.src);
+      promiseAudioRef.current = null;
       setIsSpeaking(false);
     }
   }, []);

-  useEffect(() => cancelSpeech, [cancelSpeech]);
+  useEffect(() => cancelPromiseSpeech, [cancelPromiseSpeech]);
@@ -152,7 +178,7 @@ function useTextToSpeechExternal(isLast: boolean, index = 0) {
     return isLocalSpeaking || (isLast && globalIsPlaying);
   }, [isLocalSpeaking, globalIsPlaying, isLast]);

-  return { generateSpeechExternal, cancelSpeech, isLoading, isSpeaking };
+  return { generateSpeechExternal, cancelSpeech, isLoading, isSpeaking, audioRef };
 }

 export default useTextToSpeechExternal;
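The cacheTTS flow above in miniature: the raw message text doubles as the Cache API key, a miss falls through to the TTS mutation, and a hit replays the stored blob. A sketch under those assumptions; the helper name is illustrative:

async function getCachedAudioUrl(text: string): Promise<string | undefined> {
  // the message text is used directly as the cache key
  const cached = await caches.match(text);
  if (!cached) {
    return undefined; // caller starts the /api/files/tts mutation instead
  }
  const blob = await cached.blob();
  return URL.createObjectURL(blob); // revoke once playback ends
}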


@@ -10,6 +10,7 @@ import useGetSender from '~/hooks/Conversations/useGetSender';
 import useFileHandling from '~/hooks/Files/useFileHandling';
 import { useChatContext } from '~/Providers/ChatContext';
 import useLocalize from '~/hooks/useLocalize';
+import { globalAudioId } from '~/common';
 import store from '~/store';

 type KeyEvent = KeyboardEvent<HTMLTextAreaElement>;
@@ -178,6 +179,11 @@ export default function useTextarea({
       }

       if (isNonShiftEnter && !isComposing?.current) {
+        const globalAudio = document.getElementById(globalAudioId) as HTMLAudioElement;
+        if (globalAudio) {
+          console.log('Unmuting global audio');
+          globalAudio.muted = false;
+        }
         submitButtonRef.current?.click();
       }
     },