feat: enhance call functionality with VAD integration and mute handling

Marco Beretta 2025-01-04 01:55:47 +01:00 committed by Danny Avila
parent 7aed891838
commit c768b8feb1
No known key found for this signature in database
GPG key ID: BF31EEB2C5CA0956
7 changed files with 316 additions and 87 deletions


@@ -21,7 +21,6 @@ class WebRTCConnection {
     await this.peerConnection.setRemoteDescription(offer);
-    // Create MediaStream instance properly
     const mediaStream = new MediaStream();
     this.audioTransceiver = this.peerConnection.addTransceiver('audio', {
@@ -34,7 +33,6 @@ class WebRTCConnection {
       this.socket.emit('webrtc-answer', answer);
     } catch (error) {
       this.log(`Error handling offer: ${error}`, 'error');
-      // Don't throw, handle gracefully
       this.socket.emit('webrtc-error', {
         message: error.message,
         code: 'OFFER_ERROR',
@@ -47,13 +45,11 @@ class WebRTCConnection {
       return;
     }
-    // Handle incoming audio tracks
-    this.peerConnection.ontrack = ({ track, streams }) => {
+    this.peerConnection.ontrack = ({ track }) => {
       this.log(`Received ${track.kind} track from client`);
-      // For testing: Echo the audio back after a delay
       if (track.kind === 'audio') {
-        this.handleIncomingAudio(track, streams[0]);
+        this.handleIncomingAudio(track);
       }
       track.onended = () => {
@@ -71,27 +67,22 @@ class WebRTCConnection {
       if (!this.peerConnection) {
         return;
       }
       const state = this.peerConnection.connectionState;
       this.log(`Connection state changed to ${state}`);
       this.state = state;
       if (state === 'failed' || state === 'closed') {
         this.cleanup();
       }
     };
-    this.peerConnection.oniceconnectionstatechange = () => {
-      if (this.peerConnection) {
-        this.log(`ICE connection state: ${this.peerConnection.iceConnectionState}`);
-      }
-    };
-  }
-  handleIncomingAudio(inputTrack) {
-    // For testing: Echo back the input track directly
-    this.peerConnection.addTrack(inputTrack);
-    // Log the track info for debugging
-    this.log(`Audio track added: ${inputTrack.id}, enabled: ${inputTrack.enabled}`);
+  }
+  handleIncomingAudio(track) {
+    if (this.peerConnection) {
+      const stream = new MediaStream([track]);
+      this.peerConnection.addTrack(track, stream);
+    }
   }
   async addIceCandidate(candidate) {
@@ -119,6 +110,7 @@ class WebRTCConnection {
       }
       this.peerConnection = null;
     }
     this.audioTransceiver = null;
     this.pendingCandidates = [];
     this.state = 'idle';
@@ -153,16 +145,10 @@ class SocketIOService {
     this.setupSocketHandlers();
   }
-  log(message, level = 'info') {
-    const timestamp = new Date().toISOString();
-    console.log(`[WebRTC ${timestamp}] [${level.toUpperCase()}] ${message}`);
-  }
   setupSocketHandlers() {
     this.io.on('connection', (socket) => {
       this.log(`Client connected: ${socket.id}`);
-      // Create a new WebRTC connection for this socket
       const rtcConnection = new WebRTCConnection(socket, {
         ...this.config,
         log: this.log.bind(this),
@@ -178,6 +164,10 @@ class SocketIOService {
         rtcConnection.addIceCandidate(candidate);
       });
+      socket.on('vad-status', (status) => {
+        this.log(`VAD status from ${socket.id}: ${JSON.stringify(status)}`);
+      });
       socket.on('disconnect', () => {
         this.log(`Client disconnected: ${socket.id}`);
         rtcConnection.cleanup();
@@ -186,6 +176,11 @@ class SocketIOService {
     });
   }
+  log(message, level = 'info') {
+    const timestamp = new Date().toISOString();
+    console.log(`[WebRTC ${timestamp}] [${level.toUpperCase()}] ${message}`);
+  }
   shutdown() {
     for (const connection of this.connections.values()) {
       connection.cleanup();
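The new vad-status handler above only logs whatever the client reports. For reference, a minimal sketch of the client-side counterpart is shown below; the standalone socket.io-client connection and the localhost URL are assumptions for illustration only, since the actual client routes this through WebRTCService.sendMessage (see the WebRTCService changes further down). The event name and the { speaking: boolean } payload shape are taken from this commit.

import { io } from 'socket.io-client';

// Assumed standalone connection for the sketch; the real app reuses its existing socket.
const socket = io('http://localhost:3080');

// Report voice-activity changes to the server, which currently just logs them.
function reportSpeaking(speaking: boolean): void {
  socket.emit('vad-status', { speaking });
}

reportSpeaking(true);
reportSpeaking(false);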


@@ -51,6 +51,7 @@
     "@radix-ui/react-switch": "^1.0.3",
     "@radix-ui/react-tabs": "^1.0.3",
     "@radix-ui/react-toast": "^1.1.5",
+    "@ricky0123/vad-react": "^0.0.28",
     "@tanstack/react-query": "^4.28.0",
     "@tanstack/react-table": "^8.11.7",
     "class-variance-authority": "^0.6.0",


@@ -64,7 +64,7 @@ export interface RTCMessage {
 export type MessagePayload =
   | RTCSessionDescriptionInit
   | RTCIceCandidateInit
-  | Record<string, never>;
+  | { speaking: boolean };
 export enum CallState {
   IDLE = 'idle',
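With this change, RTCMessage can carry the VAD status updates introduced in this commit. A message using the new payload variant looks roughly like the sketch below; the 'vad-status' type string mirrors what WebRTCService sends, while the exact typing of RTCMessage's type field is not visible in this diff.

// Hypothetical example object; the payload shape comes from the new union member.
const vadStatusMessage = {
  type: 'vad-status',
  payload: { speaking: true },
};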


@@ -26,11 +26,12 @@ export const Call: React.FC = () => {
     localStream,
     remoteStream,
     connectionQuality,
+    isMuted,
+    toggleMute,
   } = useCall();
   const [open, setOpen] = useRecoilState(store.callDialogOpen(0));
   const [eventLog, setEventLog] = React.useState<string[]>([]);
-  const [isMuted, setIsMuted] = React.useState(false);
   const [isAudioEnabled, setIsAudioEnabled] = React.useState(true);
   const remoteAudioRef = useRef<HTMLAudioElement>(null);
@@ -84,8 +85,8 @@ export const Call: React.FC = () => {
     hangUp();
   };
-  const toggleMute = () => {
-    setIsMuted((prev) => !prev);
+  const handleToggleMute = () => {
+    toggleMute();
     logEvent(`Microphone ${isMuted ? 'unmuted' : 'muted'}`);
   };
@@ -176,7 +177,7 @@ export const Call: React.FC = () => {
         {isActive && (
           <>
             <Button
-              onClick={toggleMute}
+              onClick={handleToggleMute}
              className={`rounded-full p-3 ${
                isMuted ? 'bg-red-100 text-red-700' : 'bg-gray-100 text-gray-700'
              }`}
@@ -218,10 +219,9 @@ export const Call: React.FC = () => {
         </div>
         {/* Event Log */}
-        <div className="mt-4 w-full rounded-md bg-gray-100 p-4 shadow-sm">
         <h3 className="mb-2 text-lg font-medium">Event Log</h3>
-        <div className="h-32 overflow-y-auto rounded-md bg-white p-2 shadow-inner">
-          <ul className="space-y-1 text-xs text-gray-600">
+        <div className="h-64 overflow-y-auto rounded-md bg-surface-secondary p-2 shadow-inner">
+          <ul className="space-y-1 text-xs text-text-secondary">
             {eventLog.map((log, index) => (
               <li key={index} className="font-mono">
                 {log}
@@ -229,14 +229,9 @@ export const Call: React.FC = () => {
             ))}
           </ul>
         </div>
-        </div>
         {/* Hidden Audio Element */}
-        <audio
-          ref={remoteAudioRef}
-          autoPlay
-          playsInline
-        >
+        <audio ref={remoteAudioRef} autoPlay>
           <track kind="captions" />
         </audio>
       </div>


@@ -1,5 +1,5 @@
 import { useState, useRef, useCallback, useEffect } from 'react';
-import { WebRTCService, ConnectionState } from '../services/WebRTC/WebRTCService';
+import { WebRTCService, ConnectionState, useVADSetup } from '../services/WebRTC/WebRTCService';
 import useWebSocket, { WebSocketEvents } from './useWebSocket';
 interface CallError {
@@ -22,6 +22,8 @@ interface CallStatus {
   localStream: MediaStream | null;
   remoteStream: MediaStream | null;
   connectionQuality: 'good' | 'poor' | 'unknown';
+  isUserSpeaking: boolean;
+  remoteAISpeaking: boolean;
 }
 const INITIAL_STATUS: CallStatus = {
@@ -31,6 +33,8 @@ const INITIAL_STATUS: CallStatus = {
   localStream: null,
   remoteStream: null,
   connectionQuality: 'unknown',
+  isUserSpeaking: false,
+  remoteAISpeaking: false,
 };
 const useCall = () => {
@@ -38,33 +42,19 @@ const useCall = () => {
   const [status, setStatus] = useState<CallStatus>(INITIAL_STATUS);
   const webrtcServiceRef = useRef<WebRTCService | null>(null);
   const statsIntervalRef = useRef<NodeJS.Timeout>();
+  const [isMuted, setIsMuted] = useState(false);
+  const vad = useVADSetup(webrtcServiceRef.current);
   const updateStatus = useCallback((updates: Partial<CallStatus>) => {
     setStatus((prev) => ({ ...prev, ...updates }));
   }, []);
   useEffect(() => {
-    return () => {
-      if (statsIntervalRef.current) {
-        clearInterval(statsIntervalRef.current);
-      }
-      if (webrtcServiceRef.current) {
-        webrtcServiceRef.current.close();
-      }
-    };
-  }, []);
+    updateStatus({ isUserSpeaking: vad.userSpeaking });
+  }, [vad.userSpeaking, updateStatus]);
   const handleRemoteStream = (stream: MediaStream | null) => {
-    console.log('[WebRTC] Remote stream received:', {
-      stream: stream,
-      active: stream?.active,
-      tracks: stream?.getTracks().map((t) => ({
-        kind: t.kind,
-        enabled: t.enabled,
-        muted: t.muted,
-      })),
-    });
     if (!stream) {
       console.error('[WebRTC] Received null remote stream');
       updateStatus({
@@ -122,10 +112,8 @@ const useCall = () => {
         break;
       case ConnectionState.CLOSED:
         updateStatus({
+          ...INITIAL_STATUS,
           callState: CallState.ENDED,
-          isConnecting: false,
-          localStream: null,
-          remoteStream: null,
         });
         break;
     }
@@ -188,17 +176,15 @@ const useCall = () => {
       error: null,
     });
-    // TODO: Remove debug or make it configurable
-    webrtcServiceRef.current = new WebRTCService((message) => sendMessage(message), {
+    webrtcServiceRef.current = new WebRTCService(sendMessage, {
       debug: true,
     });
-    webrtcServiceRef.current.on('connectionStateChange', (state: ConnectionState) => {
-      console.log('WebRTC connection state changed:', state);
-      handleConnectionStateChange(state);
-    });
+    webrtcServiceRef.current.on('connectionStateChange', handleConnectionStateChange);
     webrtcServiceRef.current.on('remoteStream', handleRemoteStream);
+    webrtcServiceRef.current.on('vadStatusChange', (speaking: boolean) => {
+      updateStatus({ isUserSpeaking: speaking });
+    });
     webrtcServiceRef.current.on('error', (error: string) => {
       console.error('WebRTC error:', error);
@@ -253,22 +239,42 @@ const useCall = () => {
   useEffect(() => {
     const cleanupFns = [
       addEventListener(WebSocketEvents.WEBRTC_ANSWER, (answer: RTCSessionDescriptionInit) => {
-        console.log('Received WebRTC answer:', answer);
         webrtcServiceRef.current?.handleAnswer(answer);
       }),
       addEventListener(WebSocketEvents.ICE_CANDIDATE, (candidate: RTCIceCandidateInit) => {
-        console.log('Received ICE candidate:', candidate);
         webrtcServiceRef.current?.addIceCandidate(candidate);
       }),
     ];
     return () => cleanupFns.forEach((fn) => fn());
-  }, [addEventListener]);
+  }, [addEventListener, updateStatus]);
+  const toggleMute = useCallback(() => {
+    if (webrtcServiceRef.current) {
+      const newMutedState = !isMuted;
+      webrtcServiceRef.current.setMuted(newMutedState);
+      setIsMuted(newMutedState);
+    }
+  }, [isMuted]);
+  useEffect(() => {
+    if (webrtcServiceRef.current) {
+      const handleMuteChange = (muted: boolean) => setIsMuted(muted);
+      webrtcServiceRef.current.on('muteStateChange', handleMuteChange);
+      return () => {
+        webrtcServiceRef.current?.off('muteStateChange', handleMuteChange);
+      };
+    }
+  }, []);
   return {
     ...status,
+    isMuted,
+    toggleMute,
     startCall,
     hangUp,
+    vadLoading: vad.loading,
+    vadError: vad.errored,
   };
 };


@@ -1,4 +1,6 @@
+import { useEffect } from 'react';
 import { EventEmitter } from 'events';
+import { useMicVAD } from '@ricky0123/vad-react';
 import type { MessagePayload } from '~/common';
 export enum ConnectionState {
@@ -24,6 +26,51 @@ interface WebRTCConfig {
   debug?: boolean;
 }
+export function useVADSetup(webrtcService: WebRTCService | null) {
+  const vad = useMicVAD({
+    startOnLoad: true,
+    onSpeechStart: () => {
+      // Only emit speech events if not muted
+      if (webrtcService && !webrtcService.isMuted()) {
+        webrtcService.handleVADStatusChange(true);
+      }
+    },
+    onSpeechEnd: () => {
+      // Only emit speech events if not muted
+      if (webrtcService && !webrtcService.isMuted()) {
+        webrtcService.handleVADStatusChange(false);
+      }
+    },
+    onVADMisfire: () => {
+      if (webrtcService && !webrtcService.isMuted()) {
+        webrtcService.handleVADStatusChange(false);
+      }
+    },
+  });
+  // Add effect to handle mute state changes
+  useEffect(() => {
+    if (webrtcService) {
+      const handleMuteChange = (muted: boolean) => {
+        if (muted) {
+          // Stop VAD processing when muted
+          vad.pause();
+        } else {
+          // Resume VAD processing when unmuted
+          vad.start();
+        }
+      };
+      webrtcService.on('muteStateChange', handleMuteChange);
+      return () => {
+        webrtcService.off('muteStateChange', handleMuteChange);
+      };
+    }
+  }, [webrtcService, vad]);
+  return vad;
+}
 export class WebRTCService extends EventEmitter {
   private peerConnection: RTCPeerConnection | null = null;
   private localStream: MediaStream | null = null;
@@ -34,6 +81,8 @@ export class WebRTCService extends EventEmitter {
   private connectionState: ConnectionState = ConnectionState.IDLE;
   private mediaState: MediaState = MediaState.INACTIVE;
+  private isUserSpeaking = false;
   private readonly DEFAULT_CONFIG: Required<WebRTCConfig> = {
     iceServers: [
       {
@@ -72,6 +121,76 @@ export class WebRTCService extends EventEmitter {
     this.log('Media state changed to:', state);
   }
+  public handleVADStatusChange(isSpeaking: boolean) {
+    if (this.isUserSpeaking !== isSpeaking) {
+      this.isUserSpeaking = isSpeaking;
+      this.sendMessage({
+        type: 'vad-status',
+        payload: { speaking: isSpeaking },
+      });
+      this.emit('vadStatusChange', isSpeaking);
+    }
+  }
+  public setMuted(muted: boolean) {
+    if (this.localStream) {
+      this.localStream.getAudioTracks().forEach((track) => {
+        // Stop the track completely when muted instead of just disabling
+        if (muted) {
+          track.stop();
+        } else {
+          // If unmuting, we need to get a new audio track
+          this.refreshAudioTrack();
+        }
+      });
+      if (muted) {
+        // Ensure VAD knows we're not speaking when muted
+        this.handleVADStatusChange(false);
+      }
+      this.emit('muteStateChange', muted);
+    }
+  }
+  public isMuted(): boolean {
+    if (!this.localStream) {
+      return false;
+    }
+    const audioTrack = this.localStream.getAudioTracks()[0];
+    return audioTrack ? !audioTrack.enabled : false;
+  }
+  private async refreshAudioTrack() {
+    try {
+      const newStream = await navigator.mediaDevices.getUserMedia({
+        audio: {
+          echoCancellation: true,
+          noiseSuppression: true,
+          autoGainControl: true,
+        },
+      });
+      const newTrack = newStream.getAudioTracks()[0];
+      if (this.localStream && this.peerConnection) {
+        const oldTrack = this.localStream.getAudioTracks()[0];
+        if (oldTrack) {
+          this.localStream.removeTrack(oldTrack);
+        }
+        this.localStream.addTrack(newTrack);
+        // Update the sender with the new track
+        const senders = this.peerConnection.getSenders();
+        const audioSender = senders.find((sender) => sender.track?.kind === 'audio');
+        if (audioSender) {
+          audioSender.replaceTrack(newTrack);
+        }
+      }
+    } catch (error) {
+      this.handleError(error);
+    }
+  }
   async initialize() {
     try {
       this.setConnectionState(ConnectionState.CONNECTING);
@@ -101,9 +220,7 @@ export class WebRTCService extends EventEmitter {
       });
       this.startConnectionTimeout();
       await this.createAndSendOffer();
       this.setMediaState(MediaState.ACTIVE);
     } catch (error) {
       this.log('Initialization error:', error);
@@ -131,15 +248,12 @@ export class WebRTCService extends EventEmitter {
       });
       if (track.kind === 'audio') {
-        // Create remote stream if needed
         if (!this.remoteStream) {
           this.remoteStream = new MediaStream();
         }
-        // Add incoming track to remote stream
         this.remoteStream.addTrack(track);
-        // Echo back the track
         if (this.peerConnection) {
           this.peerConnection.addTrack(track, this.remoteStream);
         }
@@ -163,7 +277,7 @@ export class WebRTCService extends EventEmitter {
     switch (state) {
       case 'connected':
-        this.clearConnectionTimeout(); // Clear timeout when connected
+        this.clearConnectionTimeout();
         this.setConnectionState(ConnectionState.CONNECTED);
         break;
       case 'disconnected':
@@ -232,7 +346,6 @@ export class WebRTCService extends EventEmitter {
   private startConnectionTimeout() {
     this.clearConnectionTimeout();
     this.connectionTimeoutId = setTimeout(() => {
-      // Only timeout if we're not in a connected or connecting state
       if (
         this.connectionState !== ConnectionState.CONNECTED &&
         this.connectionState !== ConnectionState.CONNECTING
@@ -276,13 +389,11 @@ export class WebRTCService extends EventEmitter {
     const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
     this.log('Error:', errorMessage);
-    // Don't set failed state if we're already connected
     if (this.connectionState !== ConnectionState.CONNECTED) {
       this.setConnectionState(ConnectionState.FAILED);
       this.emit('error', errorMessage);
     }
-    // Only close if we're not connected
     if (this.connectionState !== ConnectionState.CONNECTED) {
       this.close();
     }

package-lock.json (generated)

@@ -1646,6 +1646,7 @@
         "@radix-ui/react-switch": "^1.0.3",
         "@radix-ui/react-tabs": "^1.0.3",
         "@radix-ui/react-toast": "^1.1.5",
+        "@ricky0123/vad-react": "^0.0.28",
         "@tanstack/react-query": "^4.28.0",
         "@tanstack/react-table": "^8.11.7",
         "class-variance-authority": "^0.6.0",
@@ -14283,6 +14284,29 @@
         "node": ">=14.0.0"
       }
     },
+    "node_modules/@ricky0123/vad-react": {
+      "version": "0.0.28",
+      "resolved": "https://registry.npmjs.org/@ricky0123/vad-react/-/vad-react-0.0.28.tgz",
+      "integrity": "sha512-V2vcxhT31/tXCxqlYLJz+JzywXijMWUhp2FN30OL/NeuSwwprArhaAoUZSdjg6Hzsfe5t2lwASoUaEmGrQ/S+Q==",
+      "license": "ISC",
+      "dependencies": {
+        "@ricky0123/vad-web": "0.0.22",
+        "onnxruntime-web": "1.14.0"
+      },
+      "peerDependencies": {
+        "react": "18",
+        "react-dom": "18"
+      }
+    },
+    "node_modules/@ricky0123/vad-web": {
+      "version": "0.0.22",
+      "resolved": "https://registry.npmjs.org/@ricky0123/vad-web/-/vad-web-0.0.22.tgz",
+      "integrity": "sha512-679R6sfwXx4jkquK+FJ9RC2W29oulWC+9ZINK6LVpuy90IBV7UaTGNN79oQXufpJTJs5z4X/22nw1DQ4+Rh8CA==",
+      "license": "ISC",
+      "dependencies": {
+        "onnxruntime-web": "1.14.0"
+      }
+    },
     "node_modules/@rollup/plugin-alias": {
       "version": "5.1.0",
       "resolved": "https://registry.npmjs.org/@rollup/plugin-alias/-/plugin-alias-5.1.0.tgz",
@@ -16546,6 +16570,12 @@
         "@types/node": "*"
       }
     },
+    "node_modules/@types/long": {
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz",
+      "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==",
+      "license": "MIT"
+    },
     "node_modules/@types/mdast": {
       "version": "4.0.4",
       "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
@@ -22086,6 +22116,18 @@
         "node": ">=16"
       }
     },
+    "node_modules/flatbuffers": {
+      "version": "1.12.0",
+      "resolved": "https://registry.npmjs.org/flatbuffers/-/flatbuffers-1.12.0.tgz",
+      "integrity": "sha512-c7CZADjRcl6j0PlvFy0ZqXQ67qSEZfrVPynmnL+2zPc+NtMvrF8Y0QceMo7QqnSPc7+uWjUIAbvCQ5WIKlMVdQ==",
+      "license": "SEE LICENSE IN LICENSE.txt"
+    },
+    "node_modules/flatbuffers": {
+      "version": "1.12.0",
+      "resolved": "https://registry.npmjs.org/flatbuffers/-/flatbuffers-1.12.0.tgz",
+      "integrity": "sha512-c7CZADjRcl6j0PlvFy0ZqXQ67qSEZfrVPynmnL+2zPc+NtMvrF8Y0QceMo7QqnSPc7+uWjUIAbvCQ5WIKlMVdQ==",
+      "license": "SEE LICENSE IN LICENSE.txt"
+    },
     "node_modules/flatted": {
       "version": "3.3.2",
       "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.2.tgz",
@@ -22952,6 +22994,12 @@
         "node": ">=12"
       }
     },
+    "node_modules/guid-typescript": {
+      "version": "1.0.9",
+      "resolved": "https://registry.npmjs.org/guid-typescript/-/guid-typescript-1.0.9.tgz",
+      "integrity": "sha512-Y8T4vYhEfwJOTbouREvG+3XDsjr8E3kIr7uf+JZ0BYloFsttiHU0WfvANVsR7TxNUJa/WpCnw/Ino/p+DeBhBQ==",
+      "license": "ISC"
+    },
     "node_modules/hamt_plus": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/hamt_plus/-/hamt_plus-1.0.2.tgz",
@@ -29400,6 +29448,73 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
+    "node_modules/onnx-proto": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/onnx-proto/-/onnx-proto-4.0.4.tgz",
+      "integrity": "sha512-aldMOB3HRoo6q/phyB6QRQxSt895HNNw82BNyZ2CMh4bjeKv7g/c+VpAFtJuEMVfYLMbRx61hbuqnKceLeDcDA==",
+      "license": "MIT",
+      "dependencies": {
+        "protobufjs": "^6.8.8"
+      }
+    },
+    "node_modules/onnx-proto/node_modules/long": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
+      "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==",
+      "license": "Apache-2.0"
+    },
+    "node_modules/onnx-proto/node_modules/protobufjs": {
+      "version": "6.11.4",
+      "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.11.4.tgz",
+      "integrity": "sha512-5kQWPaJHi1WoCpjTGszzQ32PG2F4+wRY6BmAT4Vfw56Q2FZ4YZzK20xUYQH4YkfehY1e6QSICrJquM6xXZNcrw==",
+      "hasInstallScript": true,
+      "license": "BSD-3-Clause",
+      "dependencies": {
+        "@protobufjs/aspromise": "^1.1.2",
+        "@protobufjs/base64": "^1.1.2",
+        "@protobufjs/codegen": "^2.0.4",
+        "@protobufjs/eventemitter": "^1.1.0",
+        "@protobufjs/fetch": "^1.1.0",
+        "@protobufjs/float": "^1.0.2",
+        "@protobufjs/inquire": "^1.1.0",
+        "@protobufjs/path": "^1.1.2",
+        "@protobufjs/pool": "^1.1.0",
+        "@protobufjs/utf8": "^1.1.0",
+        "@types/long": "^4.0.1",
+        "@types/node": ">=13.7.0",
+        "long": "^4.0.0"
+      },
+      "bin": {
+        "pbjs": "bin/pbjs",
+        "pbts": "bin/pbts"
+      }
+    },
+    "node_modules/onnxruntime-common": {
+      "version": "1.14.0",
+      "resolved": "https://registry.npmjs.org/onnxruntime-common/-/onnxruntime-common-1.14.0.tgz",
+      "integrity": "sha512-3LJpegM2iMNRX2wUmtYfeX/ytfOzNwAWKSq1HbRrKc9+uqG/FsEA0bbKZl1btQeZaXhC26l44NWpNUeXPII7Ew==",
+      "license": "MIT"
+    },
+    "node_modules/onnxruntime-web": {
+      "version": "1.14.0",
+      "resolved": "https://registry.npmjs.org/onnxruntime-web/-/onnxruntime-web-1.14.0.tgz",
+      "integrity": "sha512-Kcqf43UMfW8mCydVGcX9OMXI2VN17c0p6XvR7IPSZzBf/6lteBzXHvcEVWDPmCKuGombl997HgLqj91F11DzXw==",
+      "license": "MIT",
+      "dependencies": {
+        "flatbuffers": "^1.12.0",
+        "guid-typescript": "^1.0.9",
+        "long": "^4.0.0",
+        "onnx-proto": "^4.0.4",
+        "onnxruntime-common": "~1.14.0",
+        "platform": "^1.3.6"
+      }
+    },
+    "node_modules/onnxruntime-web/node_modules/long": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
+      "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==",
+      "license": "Apache-2.0"
+    },
     "node_modules/openai": {
       "version": "4.80.1",
       "resolved": "https://registry.npmjs.org/openai/-/openai-4.80.1.tgz",
@@ -30092,6 +30207,12 @@
         "node": ">=8"
       }
     },
+    "node_modules/platform": {
+      "version": "1.3.6",
+      "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz",
+      "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==",
+      "license": "MIT"
+    },
     "node_modules/playwright": {
       "version": "1.50.1",
       "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.50.1.tgz",