Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-12-20 02:10:15 +01:00
✨ feat: Implement WebRTC messaging and audio handling in the WebRTC service
commit 7717d3a514
parent d5bc8d3869
8 changed files with 771 additions and 139 deletions
@@ -1,67 +1,106 @@
 import { useState, useRef, useCallback } from 'react';
-import useWebSocket from './useWebSocket';
+import { WebRTCService } from '../services/WebRTC/WebRTCService';
+import type { RTCMessage } from '~/common';
+import useWebSocket from './useWebSocket';
 
 const SILENCE_THRESHOLD = -50;
 const SILENCE_DURATION = 1000;
 
 const useCall = () => {
-  const { sendMessage } = useWebSocket();
+  const { sendMessage: wsMessage } = useWebSocket();
   const [isCalling, setIsCalling] = useState(false);
+  const [isProcessing, setIsProcessing] = useState(false);
   const audioContextRef = useRef<AudioContext | null>(null);
   const analyserRef = useRef<AnalyserNode | null>(null);
   const audioChunksRef = useRef<Blob[]>([]);
   const silenceStartRef = useRef<number | null>(null);
   const intervalRef = useRef<number | null>(null);
   const webrtcServiceRef = useRef<WebRTCService | null>(null);
 
-  const checkSilence = useCallback(() => {
-    if (!analyserRef.current || !isCalling) {
+  const sendAudioChunk = useCallback(() => {
+    if (audioChunksRef.current.length === 0) {
       return;
     }
-    const data = new Float32Array(analyserRef.current.frequencyBinCount);
-    analyserRef.current.getFloatFrequencyData(data);
-    const avg = data.reduce((a, b) => a + b) / data.length;
-    if (avg < SILENCE_THRESHOLD) {
-      if (!silenceStartRef.current) {
-        silenceStartRef.current = Date.now();
-      } else if (Date.now() - silenceStartRef.current > SILENCE_DURATION) {
-        sendMessage({ type: 'request-response' });
-        silenceStartRef.current = null;
-      }
-    } else {
-      silenceStartRef.current = null;
+
+    const audioBlob = new Blob(audioChunksRef.current, { type: 'audio/webm' });
+    // Send audio through WebRTC data channel
+    webrtcServiceRef.current?.sendAudioChunk(audioBlob);
+    // Signal processing start via WebSocket
+    wsMessage({ type: 'processing-start' });
+
+    audioChunksRef.current = [];
+    setIsProcessing(true);
+  }, [wsMessage]);
+
+  const handleRTCMessage = useCallback((message: RTCMessage) => {
+    if (message.type === 'audio-received') {
+      // Backend confirmed audio receipt
+      setIsProcessing(true);
     }
-  }, [isCalling, sendMessage]);
+  }, []);
 
   const startCall = useCallback(async () => {
-    webrtcServiceRef.current = new WebRTCService(sendMessage);
+    // Initialize WebRTC with message handler
+    webrtcServiceRef.current = new WebRTCService(handleRTCMessage);
+    await webrtcServiceRef.current.initializeCall();
+
+    // Signal call start via WebSocket
+    wsMessage({ type: 'call-start' });
+
     const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
     audioContextRef.current = new AudioContext();
     const source = audioContextRef.current.createMediaStreamSource(stream);
     analyserRef.current = audioContextRef.current.createAnalyser();
     source.connect(analyserRef.current);
 
-    intervalRef.current = window.setInterval(checkSilence, 100);
+    // Start VAD monitoring
+    intervalRef.current = window.setInterval(() => {
+      if (!analyserRef.current || !isCalling) {
+        return;
+      }
+
+      const data = new Float32Array(analyserRef.current.frequencyBinCount);
+      analyserRef.current.getFloatFrequencyData(data);
+      const avg = data.reduce((a, b) => a + b) / data.length;
+
+      if (avg < SILENCE_THRESHOLD) {
+        if (silenceStartRef.current === null) {
+          silenceStartRef.current = Date.now();
+        } else if (Date.now() - silenceStartRef.current > SILENCE_DURATION) {
+          sendAudioChunk();
+          silenceStartRef.current = null;
+        }
+      } else {
+        silenceStartRef.current = null;
+      }
+    }, 100);
 
     setIsCalling(true);
-  }, [checkSilence, sendMessage]);
+  }, [handleRTCMessage, wsMessage, sendAudioChunk]);
 
   const hangUp = useCallback(async () => {
     if (intervalRef.current) {
       clearInterval(intervalRef.current);
     }
 
     analyserRef.current = null;
     audioContextRef.current?.close();
     audioContextRef.current = null;
 
     await webrtcServiceRef.current?.endCall();
     webrtcServiceRef.current = null;
-    setIsCalling(false);
-    sendMessage({ type: 'call-ended' });
-  }, [sendMessage]);
 
-  return { isCalling, startCall, hangUp };
+    setIsCalling(false);
+    setIsProcessing(false);
+    wsMessage({ type: 'call-ended' });
+  }, [wsMessage]);
+
+  return {
+    isCalling,
+    isProcessing,
+    startCall,
+    hangUp,
+  };
 };
 
 export default useCall;
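The constructor call, initializeCall(), sendAudioChunk(blob), and endCall() above pin down the hook's side of the contract with WebRTCService. The service itself lives in ../services/WebRTC/WebRTCService, one of the other changed files not shown on this page, so the following is only a sketch of the shape those call sites imply; the peer-connection and data-channel wiring are assumptions, not the commit's actual implementation.

    import type { RTCMessage } from '~/common';

    // Hypothetical shape inferred from the hook's call sites; the real
    // WebRTCService in this commit may differ.
    export class WebRTCService {
      private peerConnection: RTCPeerConnection | null = null;
      private dataChannel: RTCDataChannel | null = null;

      // The hook passes handleRTCMessage here, so the service is expected
      // to invoke this callback for every message arriving over the channel.
      constructor(private onMessage: (message: RTCMessage) => void) {}

      async initializeCall(): Promise<void> {
        this.peerConnection = new RTCPeerConnection();
        this.dataChannel = this.peerConnection.createDataChannel('audio');
        this.dataChannel.onmessage = (event) => {
          this.onMessage(JSON.parse(event.data) as RTCMessage);
        };
        // Offer/answer and ICE exchange over the signaling channel omitted.
      }

      sendAudioChunk(chunk: Blob): void {
        if (this.dataChannel?.readyState !== 'open') {
          return;
        }
        // Not every browser accepts a Blob on RTCDataChannel.send, so
        // convert to an ArrayBuffer first; the send is fire-and-forget.
        void chunk.arrayBuffer().then((buffer) => this.dataChannel?.send(buffer));
      }

      async endCall(): Promise<void> {
        this.dataChannel?.close();
        this.peerConnection?.close();
        this.dataChannel = null;
        this.peerConnection = null;
      }
    }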
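Signaling is split across two transports: control events ('call-start', 'processing-start', 'call-ended') ride the existing WebSocket through wsMessage, while audio blobs and receipt confirmations ('audio-received') travel over the WebRTC data channel. Only those four message types are visible in this diff, so the unions below are illustrative; the real RTCMessage type exported from ~/common is likely wider.

    // Assumed shapes, derived solely from the message types visible in this diff.
    type RTCMessage = { type: 'audio-received' };

    type CallControlMessage =
      | { type: 'call-start' }
      | { type: 'processing-start' }
      | { type: 'call-ended' };

    // wsMessage sends CallControlMessage over the WebSocket;
    // handleRTCMessage receives RTCMessage from the data channel.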
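The voice-activity detection is a plain energy gate: every 100 ms the analyser's frequency bins are averaged, and a mean below SILENCE_THRESHOLD (-50 dB) that persists for SILENCE_DURATION (a full second) is treated as end of utterance. getFloatFrequencyData reports each bin in decibels, down to -Infinity for pure silence, so the average is itself a dB figure and silence pulls it far under the threshold. The same gate, sketched outside React; createSilenceGate and onUtteranceEnd are illustrative names, not LibreChat APIs:

    // Minimal sketch of the hook's VAD gate, framework-free.
    const SILENCE_THRESHOLD = -50; // dB, averaged across all frequency bins
    const SILENCE_DURATION = 1000; // ms below threshold before firing

    function createSilenceGate(analyser: AnalyserNode, onUtteranceEnd: () => void): number {
      let silenceStart: number | null = null;
      const bins = new Float32Array(analyser.frequencyBinCount);

      return window.setInterval(() => {
        analyser.getFloatFrequencyData(bins); // each bin is in dB
        const avg = bins.reduce((a, b) => a + b, 0) / bins.length;

        if (avg < SILENCE_THRESHOLD) {
          silenceStart ??= Date.now();
          if (Date.now() - silenceStart > SILENCE_DURATION) {
            onUtteranceEnd(); // the hook calls sendAudioChunk() at this point
            silenceStart = null; // re-arm for the next utterance
          }
        } else {
          silenceStart = null; // speech resumed; reset the countdown
        }
      }, 100);
    }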
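Consumers see a small surface: two booleans and two actions. A hypothetical component wiring the hook up (CallButton and the import path are illustrative, not part of this commit):

    import React from 'react';
    import useCall from '~/hooks/useCall'; // assumed alias; the hook sits beside useWebSocket

    export default function CallButton() {
      const { isCalling, isProcessing, startCall, hangUp } = useCall();

      return (
        <button onClick={isCalling ? hangUp : startCall} disabled={isProcessing}>
          {isProcessing ? 'Processing…' : isCalling ? 'Hang up' : 'Start call'}
        </button>
      );
    }

Gating the button on isProcessing mirrors where the hook raises that flag: locally when sendAudioChunk ships a blob, and again when the backend confirms receipt with 'audio-received'.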