feat: Add WebSocket functionality and integrate call features in the chat component

This commit is contained in:
Marco Beretta 2024-12-21 14:36:01 +01:00 committed by Danny Avila
parent 6b90817ae0
commit d5bc8d3869
No known key found for this signature in database
GPG key ID: BF31EEB2C5CA0956
21 changed files with 460 additions and 20 deletions

View file

@ -102,6 +102,7 @@
"ua-parser-js": "^1.0.36", "ua-parser-js": "^1.0.36",
"winston": "^3.11.0", "winston": "^3.11.0",
"winston-daily-rotate-file": "^4.7.1", "winston-daily-rotate-file": "^4.7.1",
"ws": "^8.18.0",
"youtube-transcript": "^1.2.1", "youtube-transcript": "^1.2.1",
"zod": "^3.22.4" "zod": "^3.22.4"
}, },

View file

@ -4,6 +4,7 @@ require('module-alias')({ base: path.resolve(__dirname, '..') });
const cors = require('cors'); const cors = require('cors');
const axios = require('axios'); const axios = require('axios');
const express = require('express'); const express = require('express');
const { createServer } = require('http');
const compression = require('compression'); const compression = require('compression');
const passport = require('passport'); const passport = require('passport');
const mongoSanitize = require('express-mongo-sanitize'); const mongoSanitize = require('express-mongo-sanitize');
@ -14,6 +15,7 @@ const { connectDb, indexSync } = require('~/lib/db');
const { isEnabled } = require('~/server/utils'); const { isEnabled } = require('~/server/utils');
const { ldapLogin } = require('~/strategies'); const { ldapLogin } = require('~/strategies');
const { logger } = require('~/config'); const { logger } = require('~/config');
const { WebSocketService } = require('./services/WebSocket/WebSocketServer');
const validateImageRequest = require('./middleware/validateImageRequest'); const validateImageRequest = require('./middleware/validateImageRequest');
const errorController = require('./controllers/ErrorController'); const errorController = require('./controllers/ErrorController');
const configureSocialLogins = require('./socialLogins'); const configureSocialLogins = require('./socialLogins');
@ -36,7 +38,18 @@ const startServer = async () => {
await indexSync(); await indexSync();
const app = express(); const app = express();
const server = createServer(app);
app.disable('x-powered-by'); app.disable('x-powered-by');
app.use(
cors({
origin: true,
credentials: true,
}),
);
new WebSocketService(server);
await AppService(app); await AppService(app);
const indexPath = path.join(app.locals.paths.dist, 'index.html'); const indexPath = path.join(app.locals.paths.dist, 'index.html');
@ -109,6 +122,7 @@ const startServer = async () => {
app.use('/api/agents', routes.agents); app.use('/api/agents', routes.agents);
app.use('/api/banner', routes.banner); app.use('/api/banner', routes.banner);
app.use('/api/bedrock', routes.bedrock); app.use('/api/bedrock', routes.bedrock);
app.use('/api/websocket', routes.websocket);
app.use('/api/tags', routes.tags); app.use('/api/tags', routes.tags);
@ -126,7 +140,7 @@ const startServer = async () => {
res.send(updatedIndexHtml); res.send(updatedIndexHtml);
}); });
app.listen(port, host, () => { server.listen(port, host, () => {
if (host == '0.0.0.0') { if (host == '0.0.0.0') {
logger.info( logger.info(
`Server listening on all interfaces at port ${port}. Use http://localhost:${port} to access it`, `Server listening on all interfaces at port ${port}. Use http://localhost:${port} to access it`,
@ -134,6 +148,8 @@ const startServer = async () => {
} else { } else {
logger.info(`Server listening at http://${host == '0.0.0.0' ? 'localhost' : host}:${port}`); logger.info(`Server listening at http://${host == '0.0.0.0' ? 'localhost' : host}:${port}`);
} }
logger.info(`WebSocket endpoint: ws://${host}:${port}`);
}); });
}; };

View file

@ -2,6 +2,7 @@ const assistants = require('./assistants');
const categories = require('./categories'); const categories = require('./categories');
const tokenizer = require('./tokenizer'); const tokenizer = require('./tokenizer');
const endpoints = require('./endpoints'); const endpoints = require('./endpoints');
const websocket = require('./websocket');
const staticRoute = require('./static'); const staticRoute = require('./static');
const messages = require('./messages'); const messages = require('./messages');
const presets = require('./presets'); const presets = require('./presets');
@ -15,6 +16,7 @@ const models = require('./models');
const convos = require('./convos'); const convos = require('./convos');
const config = require('./config'); const config = require('./config');
const agents = require('./agents'); const agents = require('./agents');
const banner = require('./banner');
const roles = require('./roles'); const roles = require('./roles');
const oauth = require('./oauth'); const oauth = require('./oauth');
const files = require('./files'); const files = require('./files');
@ -25,7 +27,6 @@ const edit = require('./edit');
const keys = require('./keys'); const keys = require('./keys');
const user = require('./user'); const user = require('./user');
const ask = require('./ask'); const ask = require('./ask');
const banner = require('./banner');
module.exports = { module.exports = {
ask, ask,
@ -39,6 +40,7 @@ module.exports = {
files, files,
share, share,
agents, agents,
banner,
bedrock, bedrock,
convos, convos,
search, search,
@ -50,10 +52,10 @@ module.exports = {
presets, presets,
balance, balance,
messages, messages,
websocket,
endpoints, endpoints,
tokenizer, tokenizer,
assistants, assistants,
categories, categories,
staticRoute, staticRoute,
banner,
}; };

View file

@ -0,0 +1,18 @@
const express = require('express');
const optionalJwtAuth = require('~/server/middleware/optionalJwtAuth');

const router = express.Router();

/**
 * GET /api/websocket
 *
 * Resolves the WebSocket endpoint URL for the client. When SERVER_DOMAIN is
 * configured, its http(s) scheme is stripped and it is used as the host;
 * otherwise the request's Host header is used. The secure `wss` scheme is
 * chosen only in production when SERVER_DOMAIN is an https URL.
 */
router.get('/', optionalJwtAuth, async (req, res) => {
  const domain = process.env.SERVER_DOMAIN;
  const secure = process.env.NODE_ENV === 'production' && domain?.startsWith('https');
  const host = domain ? domain.replace(/^https?:\/\//, '') : req.headers.host;
  res.json({ url: `${secure ? 'wss' : 'ws'}://${host}/ws` });
});

module.exports = router;

View file

@ -0,0 +1,70 @@
const { WebSocketServer } = require('ws');
const fs = require('fs');
const path = require('path');
module.exports.WebSocketService = class {
  /**
   * Attaches a WebSocket server to the given HTTP server at path `/ws`.
   * Buffers incoming audio chunks per client and persists them to disk when
   * the call ends; answers `request-response` messages with a canned MP3.
   *
   * @param {import('http').Server} server - HTTP server to attach to.
   */
  constructor(server) {
    this.wss = new WebSocketServer({ server, path: '/ws' });
    this.log('Server initialized');
    // clientId -> array of base64 / data-URL audio chunk strings
    this.clientAudioBuffers = new Map();
    this.setupHandlers();
  }

  log(msg) {
    console.log(`[WSS ${new Date().toISOString()}] ${msg}`);
  }

  setupHandlers() {
    this.wss.on('connection', (ws) => {
      // Random suffix avoids ID collisions when two clients connect in the same millisecond
      const clientId = `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
      this.clientAudioBuffers.set(clientId, []);
      this.log(`Client connected: ${clientId}`);

      ws.on('message', async (raw) => {
        let message;
        try {
          message = JSON.parse(raw);
        } catch {
          // Ignore frames that are not valid JSON
          return;
        }

        if (message.type === 'audio-chunk') {
          if (!this.clientAudioBuffers.has(clientId)) {
            this.clientAudioBuffers.set(clientId, []);
          }
          this.clientAudioBuffers.get(clientId).push(message.data);
        }

        if (message.type === 'request-response') {
          try {
            const filePath = path.join(__dirname, './assets/response.mp3');
            const audioFile = fs.readFileSync(filePath);
            ws.send(JSON.stringify({ type: 'audio-response', data: audioFile.toString('base64') }));
          } catch (err) {
            // A missing/unreadable asset must not crash the process
            this.log(`Failed to send audio response: ${err.message}`);
          }
        }

        if (message.type === 'call-ended') {
          const allChunks = this.clientAudioBuffers.get(clientId);
          this.writeAudioFile(clientId, allChunks);
          this.clientAudioBuffers.delete(clientId);
        }
      });

      ws.on('close', () => {
        this.log(`Client disconnected: ${clientId}`);
        this.clientAudioBuffers.delete(clientId);
      });
    });
  }

  /**
   * Concatenates the buffered chunks into a single WebM file on disk.
   * Chunks usually arrive as data URLs ("data:...;base64,<payload>");
   * bare base64 strings are accepted too.
   *
   * @param {string} clientId - Connection identifier used in the filename.
   * @param {string[]|undefined} base64Chunks - Buffered audio chunk strings.
   */
  writeAudioFile(clientId, base64Chunks) {
    if (!base64Chunks || base64Chunks.length === 0) {
      return;
    }
    try {
      const filePath = path.join(__dirname, `recorded_${clientId}.webm`);
      const buffer = Buffer.concat(
        base64Chunks.map((chunk) => {
          const idx = chunk.indexOf(',');
          return Buffer.from(idx >= 0 ? chunk.slice(idx + 1) : chunk, 'base64');
        }),
      );
      fs.writeFileSync(filePath, buffer);
      this.log(`Saved audio to ${filePath}`);
    } catch (err) {
      // Disk errors should be logged, not bring down the socket handler
      this.log(`Failed to write audio file for ${clientId}: ${err.message}`);
    }
  }
};

View file

@ -0,0 +1,52 @@
import { useRecoilState } from 'recoil';
import { Mic, Phone, PhoneOff } from 'lucide-react';
import { OGDialog, OGDialogContent, Button } from '~/components';
import { useWebRTC, useWebSocket, useCall } from '~/hooks';
import store from '~/store';
/**
 * Voice-call dialog: shows the current WebSocket connection status and a
 * single action button to start or end a call. Opening state is shared via
 * the `callDialogOpen` Recoil atom (index 0).
 */
export const Call: React.FC = () => {
  const { isConnected } = useWebSocket();
  const { isCalling, startCall, hangUp } = useCall();
  const [open, setOpen] = useRecoilState(store.callDialogOpen(0));

  // Precompute status styling so the JSX below stays flat and readable.
  const pillClass = isConnected ? 'bg-green-100 text-green-700' : 'bg-red-100 text-red-700';
  const dotClass = isConnected ? 'bg-green-500' : 'bg-red-500';

  return (
    <OGDialog open={open} onOpenChange={setOpen}>
      <OGDialogContent className="w-96 p-8">
        <div className="flex flex-col items-center gap-6">
          {/* Connection status pill */}
          <div className={`flex items-center gap-2 rounded-full px-4 py-2 ${pillClass}`}>
            <div className={`h-2 w-2 rounded-full ${dotClass}`} />
            <span className="text-sm font-medium">
              {isConnected ? 'Connected' : 'Disconnected'}
            </span>
          </div>
          {isCalling ? (
            <Button
              onClick={hangUp}
              className="flex items-center gap-2 rounded-full bg-red-500 px-6 py-3 text-white hover:bg-red-600"
            >
              <PhoneOff size={20} />
              <span>End Call</span>
            </Button>
          ) : (
            <Button
              onClick={startCall}
              disabled={!isConnected}
              className="flex items-center gap-2 rounded-full bg-green-500 px-6 py-3 text-white hover:bg-green-600 disabled:opacity-50"
            >
              <Phone size={20} />
              <span>Start Call</span>
            </Button>
          )}
        </div>
      </OGDialogContent>
    </OGDialog>
  );
};

View file

@ -1,4 +1,3 @@
import { useWatch } from 'react-hook-form';
import { memo, useRef, useMemo, useEffect, useState } from 'react'; import { memo, useRef, useMemo, useEffect, useState } from 'react';
import { useRecoilState, useRecoilValue } from 'recoil'; import { useRecoilState, useRecoilValue } from 'recoil';
import { import {
@ -32,10 +31,10 @@ import AudioRecorder from './AudioRecorder';
import { mainTextareaId } from '~/common'; import { mainTextareaId } from '~/common';
import CollapseChat from './CollapseChat'; import CollapseChat from './CollapseChat';
import StreamAudio from './StreamAudio'; import StreamAudio from './StreamAudio';
import CallButton from './CallButton';
import StopButton from './StopButton'; import StopButton from './StopButton';
import SendButton from './SendButton'; import SendButton from './SendButton';
import Mention from './Mention'; import Mention from './Mention';
import { Call } from './Call';
import store from '~/store'; import store from '~/store';
const ChatForm = ({ index = 0 }) => { const ChatForm = ({ index = 0 }) => {

View file

@ -1,11 +1,13 @@
import React, { forwardRef } from 'react'; import React, { forwardRef } from 'react';
import { useWatch } from 'react-hook-form'; import { useWatch } from 'react-hook-form';
import { useSetRecoilState } from 'recoil';
import type { TRealtimeEphemeralTokenResponse } from 'librechat-data-provider'; import type { TRealtimeEphemeralTokenResponse } from 'librechat-data-provider';
import type { Control } from 'react-hook-form'; import type { Control } from 'react-hook-form';
import { useRealtimeEphemeralTokenMutation } from '~/data-provider'; import { useRealtimeEphemeralTokenMutation } from '~/data-provider';
import { TooltipAnchor, SendIcon, CallIcon } from '~/components'; import { TooltipAnchor, SendIcon, CallIcon } from '~/components';
import { useToastContext } from '~/Providers/ToastContext'; import { useToastContext } from '~/Providers/ToastContext';
import { useLocalize } from '~/hooks'; import { useLocalize } from '~/hooks';
import store from '~/store';
import { cn } from '~/utils'; import { cn } from '~/utils';
type ButtonProps = { type ButtonProps = {
@ -56,25 +58,27 @@ const SendButton = forwardRef((props: ButtonProps, ref: React.ForwardedRef<HTMLB
const localize = useLocalize(); const localize = useLocalize();
const { showToast } = useToastContext(); const { showToast } = useToastContext();
const { text = '' } = useWatch({ control: props.control }); const { text = '' } = useWatch({ control: props.control });
const setCallOpen = useSetRecoilState(store.callDialogOpen(0));
const { mutate: startCall, isLoading: isProcessing } = useRealtimeEphemeralTokenMutation({ // const { mutate: startCall, isLoading: isProcessing } = useRealtimeEphemeralTokenMutation({
onSuccess: async (data: TRealtimeEphemeralTokenResponse) => { // onSuccess: async (data: TRealtimeEphemeralTokenResponse) => {
showToast({ // showToast({
message: 'IT WORKS!!', // message: 'IT WORKS!!',
status: 'success', // status: 'success',
}); // });
}, // },
onError: (error: unknown) => { // onError: (error: unknown) => {
showToast({ // showToast({
message: localize('com_nav_audio_process_error', (error as Error).message), // message: localize('com_nav_audio_process_error', (error as Error).message),
status: 'error', // status: 'error',
}); // });
}, // },
}); // });
const handleClick = () => { const handleClick = () => {
if (text.trim() === '') { if (text.trim() === '') {
startCall({ voice: 'verse' }); setCallOpen(true);
// startCall({ voice: 'verse' });
} }
}; };

View file

@ -18,10 +18,13 @@ export * from './AuthContext';
export * from './ThemeContext'; export * from './ThemeContext';
export * from './ScreenshotContext'; export * from './ScreenshotContext';
export * from './ApiErrorBoundaryContext'; export * from './ApiErrorBoundaryContext';
export { default as useCall } from './useCall';
export { default as useToast } from './useToast'; export { default as useToast } from './useToast';
export { default as useWebRTC } from './useWebRTC';
export { default as useTimeout } from './useTimeout'; export { default as useTimeout } from './useTimeout';
export { default as useNewConvo } from './useNewConvo'; export { default as useNewConvo } from './useNewConvo';
export { default as useLocalize } from './useLocalize'; export { default as useLocalize } from './useLocalize';
export { default as useWebSocket } from './useWebSocket';
export type { TranslationKeys } from './useLocalize'; export type { TranslationKeys } from './useLocalize';
export { default as useMediaQuery } from './useMediaQuery'; export { default as useMediaQuery } from './useMediaQuery';
export { default as useScrollToRef } from './useScrollToRef'; export { default as useScrollToRef } from './useScrollToRef';

View file

@ -0,0 +1,67 @@
import { useState, useRef, useCallback } from 'react';
import useWebSocket from './useWebSocket';
import { WebRTCService } from '../services/WebRTC/WebRTCService';
const SILENCE_THRESHOLD = -50; // dBFS; average levels below this count as silence
const SILENCE_DURATION = 1000; // ms of continuous silence before requesting a response

/**
 * Manages an audio call session: starts WebRTC capture, monitors the
 * microphone for a sustained period of silence, and asks the server for a
 * response when the user stops speaking.
 *
 * Returns `{ isCalling, startCall, hangUp }`.
 */
const useCall = () => {
  const { sendMessage } = useWebSocket();
  const [isCalling, setIsCalling] = useState(false);
  // Mirrors `isCalling` for use inside the polling interval. Reading the
  // state directly would capture a stale `false` from the closure created
  // before setIsCalling(true) re-rendered, so silence detection would never
  // run (the original bug).
  const isCallingRef = useRef(false);
  const streamRef = useRef<MediaStream | null>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  const silenceStartRef = useRef<number | null>(null);
  const intervalRef = useRef<number | null>(null);
  const webrtcServiceRef = useRef<WebRTCService | null>(null);

  const checkSilence = useCallback(() => {
    if (!analyserRef.current || !isCallingRef.current) {
      return;
    }
    const data = new Float32Array(analyserRef.current.frequencyBinCount);
    analyserRef.current.getFloatFrequencyData(data);
    const avg = data.reduce((a, b) => a + b) / data.length;
    if (avg < SILENCE_THRESHOLD) {
      if (!silenceStartRef.current) {
        silenceStartRef.current = Date.now();
      } else if (Date.now() - silenceStartRef.current > SILENCE_DURATION) {
        sendMessage({ type: 'request-response' });
        // Reset so another full silence window must elapse before the next request
        silenceStartRef.current = null;
      }
    } else {
      silenceStartRef.current = null;
    }
  }, [sendMessage]);

  const startCall = useCallback(async () => {
    webrtcServiceRef.current = new WebRTCService(sendMessage);
    await webrtcServiceRef.current.initializeCall();
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    streamRef.current = stream;
    audioContextRef.current = new AudioContext();
    const source = audioContextRef.current.createMediaStreamSource(stream);
    analyserRef.current = audioContextRef.current.createAnalyser();
    source.connect(analyserRef.current);
    // Flip the ref before the first interval tick so checkSilence is live immediately
    isCallingRef.current = true;
    intervalRef.current = window.setInterval(checkSilence, 100);
    setIsCalling(true);
  }, [checkSilence, sendMessage]);

  const hangUp = useCallback(async () => {
    isCallingRef.current = false;
    if (intervalRef.current) {
      clearInterval(intervalRef.current);
      intervalRef.current = null;
    }
    analyserRef.current = null;
    // Stop microphone tracks so the browser releases the mic (closing the
    // AudioContext alone does not stop capture)
    streamRef.current?.getTracks().forEach((track) => track.stop());
    streamRef.current = null;
    audioContextRef.current?.close();
    audioContextRef.current = null;
    await webrtcServiceRef.current?.endCall();
    webrtcServiceRef.current = null;
    silenceStartRef.current = null;
    setIsCalling(false);
    sendMessage({ type: 'call-ended' });
  }, [sendMessage]);

  return { isCalling, startCall, hangUp };
};

View file

@ -0,0 +1,77 @@
import { useRef, useCallback } from 'react';
import useWebSocket from './useWebSocket';
const SILENCE_THRESHOLD = -50; // dBFS; average levels below this count as silence
const SILENCE_DURATION = 1000; // ms of continuous silence before requesting a response

// Module-level so it has a stable identity and never invalidates hook memoization
const log = (msg: string) => console.log(`[WebRTC ${new Date().toISOString()}] ${msg}`);

/**
 * Captures the local microphone, analyses the audio level on each animation
 * frame, and sends a `request-response` message once a sustained silence is
 * detected. Returns `{ startLocalStream, stopLocalStream }`.
 */
const useWebRTC = () => {
  const { sendMessage } = useWebSocket();
  const localStreamRef = useRef<MediaStream | null>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  const silenceStartTime = useRef<number | null>(null);
  const rafIdRef = useRef<number | null>(null);
  const isProcessingRef = useRef(false);

  // Memoized (unlike the original, which recreated it every render);
  // reschedules itself via requestAnimationFrame while processing is active.
  const processAudioLevel = useCallback(() => {
    if (!analyserRef.current || !isProcessingRef.current) {
      return;
    }
    const dataArray = new Float32Array(analyserRef.current.frequencyBinCount);
    analyserRef.current.getFloatFrequencyData(dataArray);
    const average = dataArray.reduce((a, b) => a + b) / dataArray.length;
    if (average < SILENCE_THRESHOLD) {
      if (!silenceStartTime.current) {
        silenceStartTime.current = Date.now();
        log(`Silence started: ${average}dB`);
      } else if (Date.now() - silenceStartTime.current > SILENCE_DURATION) {
        log('Silence threshold reached - requesting response');
        sendMessage({ type: 'request-response' });
        silenceStartTime.current = null;
      }
    } else {
      silenceStartTime.current = null;
    }
    rafIdRef.current = requestAnimationFrame(processAudioLevel);
  }, [sendMessage]);

  const startLocalStream = useCallback(async () => {
    try {
      log('Starting audio capture');
      localStreamRef.current = await navigator.mediaDevices.getUserMedia({ audio: true });
      audioContextRef.current = new AudioContext();
      const source = audioContextRef.current.createMediaStreamSource(localStreamRef.current);
      analyserRef.current = audioContextRef.current.createAnalyser();
      source.connect(analyserRef.current);
      isProcessingRef.current = true;
      processAudioLevel();
      log('Audio capture started');
    } catch (error) {
      log(`Error: ${error instanceof Error ? error.message : 'Unknown error'}`);
      throw error;
    }
  }, [processAudioLevel]);

  const stopLocalStream = useCallback(() => {
    log('Stopping audio capture');
    isProcessingRef.current = false;
    // Cancel the pending frame explicitly rather than relying on the guard
    if (rafIdRef.current != null) {
      cancelAnimationFrame(rafIdRef.current);
      rafIdRef.current = null;
    }
    audioContextRef.current?.close();
    localStreamRef.current?.getTracks().forEach((track) => track.stop());
    localStreamRef.current = null;
    audioContextRef.current = null;
    analyserRef.current = null;
    silenceStartTime.current = null;
  }, []);

  return { startLocalStream, stopLocalStream };
};

View file

@ -0,0 +1,45 @@
import { useEffect, useRef, useState, useCallback } from 'react';
import { useGetWebsocketUrlQuery } from 'librechat-data-provider/react-query';
/**
 * Opens a WebSocket to the URL provided by the server's /api/websocket route,
 * tracks connection state, plays incoming base64 `audio-response` payloads,
 * and exposes a JSON-serializing `sendMessage`.
 */
const useWebSocket = () => {
  const { data: url } = useGetWebsocketUrlQuery();
  const [isConnected, setIsConnected] = useState(false);
  const wsRef = useRef<WebSocket | null>(null);

  const connect = useCallback(() => {
    if (!url?.url) {
      return;
    }
    const ws = new WebSocket(url.url);
    wsRef.current = ws;
    ws.onopen = () => setIsConnected(true);
    ws.onclose = () => setIsConnected(false);
    ws.onerror = (err) => console.error('WebSocket error:', err);
    ws.onmessage = (event) => {
      let msg: { type?: string; data?: string };
      try {
        msg = JSON.parse(event.data);
      } catch {
        // Ignore non-JSON frames instead of throwing inside the handler
        return;
      }
      if (msg.type === 'audio-response' && msg.data) {
        const audio = new Audio(`data:audio/mp3;base64,${msg.data}`);
        audio.play().catch(console.error);
      }
    };
  }, [url?.url]);

  useEffect(() => {
    connect();
    return () => wsRef.current?.close();
  }, [connect]);

  // `unknown` keeps callers honest; any JSON-serializable value is accepted
  const sendMessage = useCallback((message: unknown) => {
    if (wsRef.current?.readyState === WebSocket.OPEN) {
      wsRef.current.send(JSON.stringify(message));
    }
  }, []);

  return { isConnected, sendMessage };
};

View file

@ -0,0 +1,36 @@
/**
 * Wraps microphone capture for a call: records audio via MediaRecorder and
 * streams each chunk to the server as a base64 data URL through the supplied
 * `sendMessage` callback.
 */
export class WebRTCService {
  private peerConnection: RTCPeerConnection | null = null;
  private mediaRecorder: MediaRecorder | null = null;
  private localStream: MediaStream | null = null;
  private readonly sendMessage: (msg: unknown) => void;

  constructor(sendMessage: (msg: unknown) => void) {
    this.sendMessage = sendMessage;
  }

  /** Acquires the microphone and starts recording/forwarding audio chunks. */
  async initializeCall() {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    this.localStream = stream;
    this.peerConnection = new RTCPeerConnection();
    stream.getTracks().forEach((track) => this.peerConnection?.addTrack(track, stream));
    this.mediaRecorder = new MediaRecorder(stream);
    this.mediaRecorder.ondataavailable = (e) => {
      if (e.data.size > 0) {
        const reader = new FileReader();
        reader.onload = () => {
          // reader.result is a data URL ("data:<mime>;base64,<payload>")
          this.sendMessage({
            type: 'audio-chunk',
            data: reader.result,
          });
        };
        reader.readAsDataURL(e.data);
      }
    };
    this.mediaRecorder.start();
  }

  /** Stops recording, releases the microphone, and tears down the connection. */
  async endCall() {
    // stop() throws InvalidStateError on an inactive recorder, so guard on state
    if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
      this.mediaRecorder.stop();
    }
    this.mediaRecorder = null;
    // Stop tracks explicitly: closing the peer connection alone leaves the
    // microphone capture (and the browser's mic indicator) live
    this.localStream?.getTracks().forEach((track) => track.stop());
    this.localStream = null;
    this.peerConnection?.close();
    this.peerConnection = null;
  }
}

View file

@ -368,6 +368,11 @@ const updateConversationSelector = selectorFamily({
}, },
}); });
// Open/closed state for the voice-call dialog, keyed per chat index
// (the Call component uses key 0); closed by default.
const callDialogOpen = atomFamily<boolean, string | number | null>({
  key: 'callDialogOpen',
  default: false,
});
export default { export default {
conversationKeysAtom, conversationKeysAtom,
conversationByIndex, conversationByIndex,
@ -399,4 +404,5 @@ export default {
useClearLatestMessages, useClearLatestMessages,
showPromptsPopoverFamily, showPromptsPopoverFamily,
updateConversationSelector, updateConversationSelector,
callDialogOpen,
}; };

22
package-lock.json generated
View file

@ -118,6 +118,7 @@
"ua-parser-js": "^1.0.36", "ua-parser-js": "^1.0.36",
"winston": "^3.11.0", "winston": "^3.11.0",
"winston-daily-rotate-file": "^4.7.1", "winston-daily-rotate-file": "^4.7.1",
"ws": "^8.18.0",
"youtube-transcript": "^1.2.1", "youtube-transcript": "^1.2.1",
"zod": "^3.22.4" "zod": "^3.22.4"
}, },
@ -1593,6 +1594,27 @@
"webidl-conversions": "^3.0.0" "webidl-conversions": "^3.0.0"
} }
}, },
"api/node_modules/ws": {
"version": "8.18.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz",
"integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"client": { "client": {
"name": "@librechat/frontend", "name": "@librechat/frontend",
"version": "v0.7.7-rc1", "version": "v0.7.7-rc1",

View file

@ -239,3 +239,5 @@ export const addTagToConversation = (conversationId: string) =>
export const userTerms = () => '/api/user/terms'; export const userTerms = () => '/api/user/terms';
export const acceptUserTerms = () => '/api/user/terms/accept'; export const acceptUserTerms = () => '/api/user/terms/accept';
export const banner = () => '/api/banner'; export const banner = () => '/api/banner';
export const websocket = () => '/api/websocket';

View file

@ -780,3 +780,7 @@ export function acceptTerms(): Promise<t.TAcceptTermsResponse> {
export function getBanner(): Promise<t.TBannerResponse> { export function getBanner(): Promise<t.TBannerResponse> {
return request.get(endpoints.banner()); return request.get(endpoints.banner());
} }
export function getWebsocketUrl(): Promise<t.TWebsocketUrlResponse> {
return request.get(endpoints.websocket());
}

View file

@ -46,6 +46,7 @@ export enum QueryKeys {
health = 'health', health = 'health',
userTerms = 'userTerms', userTerms = 'userTerms',
banner = 'banner', banner = 'banner',
websocketUrl = 'websocketUrl',
} }
export enum MutationKeys { export enum MutationKeys {

View file

@ -376,3 +376,14 @@ export const useGetCustomConfigSpeechQuery = (
}, },
); );
}; };
export const useGetWebsocketUrlQuery = (
config?: UseQueryOptions<t.TWebsocketUrlResponse>,
): QueryObserverResult<t.TWebsocketUrlResponse> => {
return useQuery<t.TWebsocketUrlResponse>([QueryKeys.websocketUrl], () => dataService.getWebsocketUrl(), {
refetchOnWindowFocus: false,
refetchOnReconnect: false,
refetchOnMount: false,
...config,
});
};

View file

@ -482,3 +482,7 @@ export type TRealtimeEphemeralTokenResponse = {
token: string; token: string;
url: string; url: string;
}; };
export type TWebsocketUrlResponse = {
url: string;
};