feat: Enhance Stream Management with Abort Functionality

- Updated the abort endpoint to support aborting ongoing generation streams using either streamId or conversationId.
- Introduced a new mutation hook `useAbortStreamMutation` for client-side integration (see the usage sketch below).
- Added `useStreamStatus` query to monitor stream status and facilitate resuming conversations.
- Enhanced `useChatHelpers` to incorporate abort functionality when stopping generation.
- Improved `useResumableSSE` to handle stream errors and token refresh seamlessly.
- Updated `useResumeOnLoad` to check for active streams and resume conversations appropriately.
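
Illustrative usage of the new abort flow from the client (a minimal TypeScript sketch; the wrapper name `useStopGeneration` is hypothetical — the real integration is `stopGenerating` in `useChatHelpers`, shown in the diff below):

import { useAbortStreamMutation } from '~/data-provider';

/**
 * Minimal sketch of a stop handler (component wiring omitted).
 * Passing conversationId lets the backend look up the job; streamId
 * can be passed instead when the client already knows it.
 */
export function useStopGeneration(conversationId?: string) {
  const abortMutation = useAbortStreamMutation();

  return async () => {
    if (!conversationId) {
      return;
    }
    try {
      await abortMutation.mutateAsync({ conversationId });
      // The active SSE stream then receives `done` with `aborted: true` and cleans up the UI.
    } catch (error) {
      console.error('Abort request failed:', error);
    }
  };
}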
commit 1853b4a189 (parent ff14cd3b44)
Author: Danny Avila
Date: 2025-12-11 21:19:43 -05:00
GPG key ID: BF31EEB2C5CA0956
11 changed files with 295 additions and 136 deletions


@@ -140,6 +140,10 @@ const ResumableAgentController = async (req, res, next, initializeClient, addTit
       client = result.client;
+      if (client?.sender) {
+        GenerationJobManager.updateMetadata(streamId, { sender: client.sender });
+      }

       res.json({ streamId, status: 'started' });

       let conversationId = reqConversationId;


@@ -156,12 +156,27 @@ router.post('/chat/abort', (req, res) => {
   logger.debug(`[AgentStream] Method: ${req.method}, Path: ${req.path}`);
   logger.debug(`[AgentStream] Body:`, req.body);
-  const { streamId, abortKey } = req.body;
-  const jobStreamId = streamId || abortKey?.split(':')?.[0];
+  const { streamId, conversationId, abortKey } = req.body;
+
+  // Try to find job by streamId first, then by conversationId, then by abortKey
+  let jobStreamId = streamId;
+  let job = jobStreamId ? GenerationJobManager.getJob(jobStreamId) : null;
+
+  if (!job && conversationId) {
+    job = GenerationJobManager.getJobByConversation(conversationId);
+    if (job) {
+      jobStreamId = job.streamId;
+    }
+  }
+
+  if (!job && abortKey) {
+    jobStreamId = abortKey.split(':')[0];
+    job = GenerationJobManager.getJob(jobStreamId);
+  }

   logger.debug(`[AgentStream] Computed jobStreamId: ${jobStreamId}`);
-  if (jobStreamId && GenerationJobManager.hasJob(jobStreamId)) {
+  if (job && jobStreamId) {
     logger.debug(`[AgentStream] Job found, aborting: ${jobStreamId}`);
     GenerationJobManager.abortJob(jobStreamId);
     logger.debug(`[AgentStream] Job aborted successfully: ${jobStreamId}`);
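
For reference, the route above accepts either identifier; a minimal sketch of the request/response contract (raw fetch shown only for illustration — the client code in this commit goes through `request.post`, and the response shape mirrors `AbortStreamResponse` in the next file):

// Sketch: abort a running generation by conversationId (streamId works the same way).
async function abortByConversation(conversationId: string, token: string) {
  const res = await fetch('/api/agents/chat/abort', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}`,
    },
    body: JSON.stringify({ conversationId }),
  });
  // Expected shape: { success: boolean; aborted?: string; error?: string }
  return (await res.json()) as { success: boolean; aborted?: string; error?: string };
}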


@@ -0,0 +1,2 @@
+export * from './queries';
+export * from './mutations';


@@ -0,0 +1,39 @@
+import { useMutation } from '@tanstack/react-query';
+import { request } from 'librechat-data-provider';
+
+export interface AbortStreamParams {
+  /** The stream ID to abort (if known) */
+  streamId?: string;
+  /** The conversation ID to abort (backend will look up the job) */
+  conversationId?: string;
+}
+
+export interface AbortStreamResponse {
+  success: boolean;
+  aborted?: string;
+  error?: string;
+}
+
+/**
+ * Abort an ongoing generation stream.
+ * The backend will emit a `done` event with `aborted: true` to the SSE stream,
+ * allowing the client to handle cleanup via the normal event flow.
+ *
+ * Can pass either streamId or conversationId - backend will find the job.
+ */
+export const abortStream = async (params: AbortStreamParams): Promise<AbortStreamResponse> => {
+  console.log('[abortStream] Calling abort endpoint with params:', params);
+  const result = (await request.post('/api/agents/chat/abort', params)) as AbortStreamResponse;
+  console.log('[abortStream] Abort response:', result);
+  return result;
+};
+
+/**
+ * React Query mutation hook for aborting a generation stream.
+ * Use this when the user explicitly clicks the stop button.
+ */
+export function useAbortStreamMutation() {
+  return useMutation({
+    mutationFn: abortStream,
+  });
+}


@@ -1,5 +1,6 @@
 import { useQuery } from '@tanstack/react-query';
 import { request } from 'librechat-data-provider';
+import type { Agents } from 'librechat-data-provider';

 export interface StreamStatusResponse {
   active: boolean;
@@ -8,6 +9,7 @@ export interface StreamStatusResponse {
   chunkCount?: number;
   aggregatedContent?: Array<{ type: string; text?: string }>;
   createdAt?: number;
+  resumeState?: Agents.ResumeState;
 }

 /**
@@ -19,8 +21,12 @@ export const streamStatusQueryKey = (conversationId: string) => ['streamStatus',
  * Fetch stream status for a conversation
  */
 export const fetchStreamStatus = async (conversationId: string): Promise<StreamStatusResponse> => {
-  const response = await request.get(`/api/agents/chat/status/${conversationId}`);
-  return response.data;
+  console.log('[fetchStreamStatus] Fetching status for:', conversationId);
+  const result = await request.get<StreamStatusResponse>(
+    `/api/agents/chat/status/${conversationId}`,
+  );
+  console.log('[fetchStreamStatus] Result:', result);
+  return result;
 };

 /**
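
A sketch of consuming the status query with React Query (the wrapper name `useHasActiveStream` is hypothetical, and it assumes `fetchStreamStatus` and `streamStatusQueryKey` are re-exported through the new SSE index; the commit's own `useStreamStatus` hook fills this role for `useResumeOnLoad`):

import { useQuery } from '@tanstack/react-query';
import { fetchStreamStatus, streamStatusQueryKey } from '~/data-provider';

/**
 * Illustrative wrapper: check whether a conversation still has an active
 * generation job (e.g., before deciding to resume it on page load).
 */
export function useHasActiveStream(conversationId: string, enabled: boolean) {
  return useQuery({
    queryKey: streamStatusQueryKey(conversationId),
    queryFn: () => fetchStreamStatus(conversationId),
    enabled,
    // Job status is short-lived server state; avoid serving a stale answer.
    staleTime: 0,
  });
}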


@@ -15,3 +15,4 @@ export * from './queries';
 export * from './roles';
 export * from './tags';
 export * from './MCP';
+export * from './SSE';


@@ -1,10 +1,10 @@
 import { useCallback, useState } from 'react';
-import { QueryKeys } from 'librechat-data-provider';
+import { QueryKeys, isAssistantsEndpoint } from 'librechat-data-provider';
 import { useQueryClient } from '@tanstack/react-query';
 import { useRecoilState, useResetRecoilState, useSetRecoilState } from 'recoil';
 import type { TMessage } from 'librechat-data-provider';
+import { useAbortStreamMutation, useGetMessagesByConvoId } from '~/data-provider';
 import useChatFunctions from '~/hooks/Chat/useChatFunctions';
-import { useGetMessagesByConvoId } from '~/data-provider';
 import { useAuthContext } from '~/hooks/AuthContext';
 import useNewConvo from '~/hooks/useNewConvo';
 import store from '~/store';
@@ -17,11 +17,12 @@ export default function useChatHelpers(index = 0, paramId?: string) {
   const queryClient = useQueryClient();
   const { isAuthenticated } = useAuthContext();
+  const abortMutation = useAbortStreamMutation();
   const { newConversation } = useNewConvo(index);
   const { useCreateConversationAtom } = store;
   const { conversation, setConversation } = useCreateConversationAtom(index);
-  const { conversationId } = conversation ?? {};
+  const { conversationId, endpoint, endpointType } = conversation ?? {};

   const queryParam = paramId === 'new' ? paramId : (conversationId ?? paramId ?? '');
@@ -107,7 +108,43 @@ export default function useChatHelpers(index = 0, paramId?: string) {
     }
   };

-  const stopGenerating = () => clearAllSubmissions();
+  /**
+   * Stop generation - for non-assistants endpoints, calls abort endpoint first.
+   * The abort endpoint will cause the backend to emit a `done` event with `aborted: true`,
+   * which will be handled by the SSE event handler to clean up UI.
+   * Assistants endpoint has its own abort mechanism via useEventHandlers.abortConversation.
+   */
+  const stopGenerating = useCallback(async () => {
+    const actualEndpoint = endpointType ?? endpoint;
+    const isAssistants = isAssistantsEndpoint(actualEndpoint);
+    console.log('[useChatHelpers] stopGenerating called', {
+      conversationId,
+      endpoint,
+      endpointType,
+      actualEndpoint,
+      isAssistants,
+    });
+
+    // For non-assistants endpoints (using resumable streams), call abort endpoint first
+    if (conversationId && !isAssistants) {
+      try {
+        console.log('[useChatHelpers] Calling abort mutation for:', conversationId);
+        await abortMutation.mutateAsync({ conversationId });
+        console.log('[useChatHelpers] Abort mutation succeeded');
+        // The SSE will receive a `done` event with `aborted: true` and clean up
+        // We still clear submissions as a fallback
+        clearAllSubmissions();
+      } catch (error) {
+        console.error('[useChatHelpers] Abort failed:', error);
+        // Fall back to clearing submissions
+        clearAllSubmissions();
+      }
+    } else {
+      // For assistants endpoints, just clear submissions (existing behavior)
+      console.log('[useChatHelpers] Assistants endpoint, just clearing submissions');
+      clearAllSubmissions();
+    }
+  }, [conversationId, endpoint, endpointType, abortMutation, clearAllSubmissions]);

   const handleStopGenerating = (e: React.MouseEvent<HTMLButtonElement>) => {
     e.preventDefault();


@@ -11,7 +11,6 @@ import {
 } from 'librechat-data-provider';
 import type { TMessage, TPayload, TSubmission, EventSubmission } from 'librechat-data-provider';
 import type { EventHandlerParams } from './useEventHandlers';
-import type { TResData } from '~/common';
 import { useGenTitleMutation, useGetStartupConfig, useGetUserBalance } from '~/data-provider';
 import { useAuthContext } from '~/hooks/AuthContext';
 import useEventHandlers from './useEventHandlers';
@@ -43,6 +42,11 @@ const MAX_RETRIES = 5;
  * Hook for resumable SSE streams.
  * Separates generation start (POST) from stream subscription (GET EventSource).
  * Supports auto-reconnection with exponential backoff.
+ *
+ * Key behavior:
+ * - Navigation away does NOT abort the generation (just closes SSE)
+ * - Only explicit abort (via stop button backend abort endpoint) stops generation
+ * - Backend emits `done` event with `aborted: true` on abort, handled via finalHandler
  */
 export default function useResumableSSE(
   submission: TSubmission | null,
@@ -83,7 +87,6 @@ export default function useResumableSSE(
     contentHandler,
     createdHandler,
     attachmentHandler,
-    abortConversation,
   } = useEventHandlers({
     genTitle,
     setMessages,
@@ -104,6 +107,7 @@
   /**
    * Subscribe to stream via SSE library (supports custom headers)
+   * Follows same auth pattern as useSSE
    */
   const subscribeToStream = useCallback(
     (currentStreamId: string, currentSubmission: TSubmission) => {
@@ -131,6 +135,11 @@
        const data = JSON.parse(e.data);

        if (data.final != null) {
+          console.log('[ResumableSSE] Received FINAL event', {
+            aborted: data.aborted,
+            conversationId: data.conversation?.conversationId,
+            hasResponseMessage: !!data.responseMessage,
+          });
          clearDraft(currentSubmission.conversation?.conversationId);
          try {
            finalHandler(data, currentSubmission as EventSubmission);
@@ -146,6 +155,10 @@
        }

        if (data.created != null) {
+          console.log('[ResumableSSE] Received CREATED event', {
+            messageId: data.message?.messageId,
+            conversationId: data.message?.conversationId,
+          });
          const runId = v4();
          setActiveRunId(runId);
          userMessage = {
@@ -171,6 +184,10 @@
        }

        if (data.sync != null) {
+          console.log('[ResumableSSE] Received SYNC event', {
+            conversationId: data.conversationId,
+            hasResumeState: !!data.resumeState,
+          });
          const runId = v4();
          setActiveRunId(runId);
          syncHandler(data, { ...currentSubmission, userMessage } as EventSubmission);
@@ -200,68 +217,33 @@
        }
      });

-      // Handle cancel event (triggered when stop button is clicked)
-      sse.addEventListener('cancel', async () => {
-        console.log('[ResumableSSE] Cancel requested, aborting job');
-        sse.close();
-
-        // Call abort endpoint to stop backend generation
-        try {
-          await fetch('/api/agents/chat/abort', {
-            method: 'POST',
-            headers: {
-              'Content-Type': 'application/json',
-              Authorization: `Bearer ${token}`,
-            },
-            body: JSON.stringify({ streamId: currentStreamId }),
-          });
-        } catch (error) {
-          console.error('[ResumableSSE] Error aborting job:', error);
-        }
-
-        // Handle UI cleanup via abortConversation
-        const latestMessages = getMessages();
-        const conversationId = latestMessages?.[latestMessages.length - 1]?.conversationId;
-        try {
-          await abortConversation(
-            conversationId ??
-              userMessage.conversationId ??
-              currentSubmission.conversation?.conversationId ??
-              '',
-            currentSubmission as EventSubmission,
-            latestMessages,
-          );
-        } catch (error) {
-          console.error('[ResumableSSE] Error during abort:', error);
-          setIsSubmitting(false);
-          setShowStopButton(false);
-        }
-
-        setStreamId(null);
-      });
-
      sse.addEventListener('error', async (e: MessageEvent) => {
-        console.log('[ResumableSSE] Stream error, connection closed');
-        sse.close();
+        console.log('[ResumableSSE] Stream error');
+        (startupConfig?.balance?.enabled ?? false) && balanceQuery.refetch();

-        // Check for 401 and try to refresh token
+        // Check for 401 and try to refresh token (same pattern as useSSE)
        /* @ts-ignore */
        if (e.responseCode === 401) {
          try {
            const refreshResponse = await request.refreshToken();
            const newToken = refreshResponse?.token ?? '';
-            if (newToken) {
-              request.dispatchTokenUpdatedEvent(newToken);
-              // Retry with new token
-              if (submissionRef.current) {
-                subscribeToStream(currentStreamId, submissionRef.current);
-              }
-              return;
+            if (!newToken) {
+              throw new Error('Token refresh failed.');
            }
+            // Update headers on same SSE instance and retry (like useSSE)
+            sse.headers = {
+              Authorization: `Bearer ${newToken}`,
+            };
+            request.dispatchTokenUpdatedEvent(newToken);
+            sse.stream();
+            return;
          } catch (error) {
            console.log('[ResumableSSE] Token refresh failed:', error);
          }
        }

+        sse.close();
        if (reconnectAttemptRef.current < MAX_RETRIES) {
          reconnectAttemptRef.current++;
          const delay = Math.min(1000 * Math.pow(2, reconnectAttemptRef.current - 1), 30000);
@@ -303,13 +285,12 @@
      setIsSubmitting,
      startupConfig?.balance?.enabled,
      balanceQuery,
-      abortConversation,
-      getMessages,
    ],
  );

  /**
   * Start generation (POST request that returns streamId)
+   * Uses request.post which has axios interceptors for automatic token refresh
   */
  const startGeneration = useCallback(
    async (currentSubmission: TSubmission): Promise<string | null> => {
@@ -324,24 +305,10 @@
        : `${payloadData.server}?resumable=true`;

      try {
-        const response = await fetch(url, {
-          method: 'POST',
-          headers: {
-            'Content-Type': 'application/json',
-            Authorization: `Bearer ${token}`,
-          },
-          body: JSON.stringify(payload),
-        });
-
-        if (!response.ok) {
-          const errorData = await response.json().catch(() => ({}));
-          throw new Error(errorData.error || `Failed to start generation: ${response.statusText}`);
-        }
-
-        const { streamId: newStreamId } = await response.json();
-        console.log('[ResumableSSE] Generation started:', { streamId: newStreamId });
-        return newStreamId;
+        // Use request.post which handles auth token refresh via axios interceptors
+        const data = (await request.post(url, payload)) as { streamId: string };
+        console.log('[ResumableSSE] Generation started:', { streamId: data.streamId });
+        return data.streamId;
      } catch (error) {
        console.error('[ResumableSSE] Error starting generation:', error);
        errorHandler({ data: undefined, submission: currentSubmission as EventSubmission });
@@ -349,15 +316,18 @@
        return null;
      }
    },
-    [token, clearStepMaps, errorHandler, setIsSubmitting],
+    [clearStepMaps, errorHandler, setIsSubmitting],
  );

  useEffect(() => {
    if (!submission || Object.keys(submission).length === 0) {
+      console.log('[ResumableSSE] No submission, cleaning up');
+      // Clear reconnect timeout if submission is cleared
      if (reconnectTimeoutRef.current) {
        clearTimeout(reconnectTimeoutRef.current);
        reconnectTimeoutRef.current = null;
      }
+      // Close SSE but do NOT dispatch cancel - navigation should not abort
      if (sseRef.current) {
        sseRef.current.close();
        sseRef.current = null;
      }
@@ -368,36 +338,56 @@
      return;
    }

+    const resumeStreamId = (submission as TSubmission & { resumeStreamId?: string }).resumeStreamId;
+    console.log('[ResumableSSE] Effect triggered', {
+      conversationId: submission.conversation?.conversationId,
+      hasResumeStreamId: !!resumeStreamId,
+      resumeStreamId,
+      userMessageId: submission.userMessage?.messageId,
+    });
+
    submissionRef.current = submission;

    const initStream = async () => {
      setIsSubmitting(true);
+      setShowStopButton(true);

-      const newStreamId = await startGeneration(submission);
-      if (newStreamId) {
-        setStreamId(newStreamId);
-        subscribeToStream(newStreamId, submission);
+      if (resumeStreamId) {
+        // Resume: just subscribe to existing stream, don't start new generation
+        console.log('[ResumableSSE] Resuming existing stream:', resumeStreamId);
+        setStreamId(resumeStreamId);
+        subscribeToStream(resumeStreamId, submission);
+      } else {
+        // New generation: start and then subscribe
+        console.log('[ResumableSSE] Starting NEW generation');
+        const newStreamId = await startGeneration(submission);
+        if (newStreamId) {
+          setStreamId(newStreamId);
+          subscribeToStream(newStreamId, submission);
+        } else {
+          console.error('[ResumableSSE] Failed to get streamId from startGeneration');
+        }
      }
    };

    initStream();

    return () => {
+      console.log('[ResumableSSE] Cleanup - closing SSE, resetting UI state');
+      // Cleanup on unmount/navigation - close connection but DO NOT abort backend
+      // Reset UI state so it doesn't leak to other conversations
+      // If user returns to this conversation, useResumeOnLoad will restore the state
      if (reconnectTimeoutRef.current) {
        clearTimeout(reconnectTimeoutRef.current);
        reconnectTimeoutRef.current = null;
      }
      if (sseRef.current) {
-        const isCancelled = sseRef.current.readyState <= 1;
        sseRef.current.close();
-        if (isCancelled) {
-          // Dispatch cancel event to trigger abort
-          const e = new Event('cancel');
-          /* @ts-ignore */
-          sseRef.current.dispatchEvent(e);
-        }
        sseRef.current = null;
      }
+      // Reset UI state on cleanup - useResumeOnLoad will restore if needed
+      setIsSubmitting(false);
+      setShowStopButton(false);
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [submission]);


@@ -2,6 +2,7 @@ import { useEffect, useRef } from 'react';
 import { useSetRecoilState, useRecoilValue } from 'recoil';
 import { Constants, tMessageSchema } from 'librechat-data-provider';
 import type { TMessage, TConversation, TSubmission, Agents } from 'librechat-data-provider';
+import { useStreamStatus } from '~/data-provider';
 import store from '~/store';

 /**
@@ -77,7 +78,9 @@ function buildSubmissionFromResumeState(
     isRegenerate: false,
     isTemporary: false,
     endpointOption: {},
-  } as TSubmission;
+    // Signal to useResumableSSE to subscribe to existing stream instead of starting new
+    resumeStreamId: streamId,
+  } as TSubmission & { resumeStreamId: string };
 }

 /**
@@ -97,67 +100,121 @@ export default function useResumeOnLoad(
   const resumableEnabled = useRecoilValue(store.resumableStreams);
   const setSubmission = useSetRecoilState(store.submissionByIndex(runIndex));
   const currentSubmission = useRecoilValue(store.submissionByIndex(runIndex));
-  const hasResumedRef = useRef<string | null>(null);
+  // Track conversations we've already processed (either resumed or skipped)
+  const processedConvoRef = useRef<string | null>(null);

   // Check for active stream when conversation changes
-  // const { data: streamStatus, isSuccess } = useStreamStatus(
-  //   conversationId,
-  //   resumableEnabled && !currentSubmission, // Only check if no active submission
-  // );
+  // Only check if resumable is enabled and no active submission
+  const shouldCheck =
+    resumableEnabled &&
+    !currentSubmission &&
+    !!conversationId &&
+    conversationId !== Constants.NEW_CONVO &&
+    processedConvoRef.current !== conversationId; // Don't re-check processed convos
+  const { data: streamStatus, isSuccess } = useStreamStatus(conversationId, shouldCheck);

   useEffect(() => {
-    // if (!resumableEnabled || !conversationId || !isSuccess || !streamStatus) {
-    if (!resumableEnabled || !conversationId) {
+    console.log('[ResumeOnLoad] Effect check', {
+      resumableEnabled,
+      conversationId,
+      hasCurrentSubmission: !!currentSubmission,
+      currentSubmissionConvoId: currentSubmission?.conversation?.conversationId,
+      isSuccess,
+      streamStatusActive: streamStatus?.active,
+      streamStatusStreamId: streamStatus?.streamId,
+      processedConvoRef: processedConvoRef.current,
+    });
+
+    if (!resumableEnabled || !conversationId || conversationId === Constants.NEW_CONVO) {
+      console.log('[ResumeOnLoad] Skipping - not enabled or new convo');
       return;
     }

-    // Don't resume if we already have an active submission
+    // Don't resume if we already have an active submission (we started it ourselves)
     if (currentSubmission) {
+      console.log('[ResumeOnLoad] Skipping - already have active submission, marking as processed');
+      // Mark as processed so we don't try again
+      processedConvoRef.current = conversationId;
       return;
     }

-    // Don't resume the same conversation twice
-    if (hasResumedRef.current === conversationId) {
+    // Wait for stream status query to complete
+    if (!isSuccess || !streamStatus) {
+      console.log('[ResumeOnLoad] Waiting for stream status query');
       return;
     }

+    // Don't process the same conversation twice
+    if (processedConvoRef.current === conversationId) {
+      console.log('[ResumeOnLoad] Skipping - already processed this conversation');
+      return;
+    }
+
+    // Mark as processed immediately to prevent race conditions
+    processedConvoRef.current = conversationId;
+
     // Check if there's an active job to resume
-    // if (!streamStatus.active || !streamStatus.streamId) {
-    //   return;
-    // }
-
-    // console.log('[ResumeOnLoad] Found active job, creating submission...', {
-    //   streamId: streamStatus.streamId,
-    //   status: streamStatus.status,
-    // });
-
-    hasResumedRef.current = conversationId;
+    if (!streamStatus.active || !streamStatus.streamId) {
+      console.log('[ResumeOnLoad] No active job to resume for:', conversationId);
+      return;
+    }
+
+    console.log('[ResumeOnLoad] Found active job, creating submission...', {
+      streamId: streamStatus.streamId,
+      status: streamStatus.status,
+      resumeState: streamStatus.resumeState,
+    });

     const messages = getMessages() || [];

-    // Minimal submission without resume state
-    const lastMessage = messages[messages.length - 1];
-    const submission: TSubmission = {
-      messages,
-      userMessage: lastMessage ?? ({ messageId: 'resume', conversationId, text: '' } as TMessage),
-      initialResponse: {
-        messageId: 'resume_',
-        conversationId,
-        text: '',
-        content: [{ type: 'text', text: '' }],
-      } as TMessage,
-      conversation: { conversationId, title: 'Resumed Chat' } as TConversation,
-      isRegenerate: false,
-      isTemporary: false,
-      endpointOption: {},
-    } as TSubmission;
-    setSubmission(submission);
-  }, [conversationId, resumableEnabled, currentSubmission, getMessages, setSubmission]);
+    // Build submission from resume state if available
+    if (streamStatus.resumeState) {
+      const submission = buildSubmissionFromResumeState(
+        streamStatus.resumeState,
+        streamStatus.streamId,
+        messages,
+        conversationId,
+      );
+      setSubmission(submission);
+    } else {
+      // Minimal submission without resume state
+      const lastUserMessage = [...messages].reverse().find((m) => m.isCreatedByUser);
+      const submission = {
+        messages,
+        userMessage:
+          lastUserMessage ?? ({ messageId: 'resume', conversationId, text: '' } as TMessage),
+        initialResponse: {
+          messageId: 'resume_',
+          conversationId,
+          text: '',
+          content: streamStatus.aggregatedContent ?? [{ type: 'text', text: '' }],
+        } as TMessage,
+        conversation: { conversationId, title: 'Resumed Chat' } as TConversation,
+        isRegenerate: false,
+        isTemporary: false,
+        endpointOption: {},
+        // Signal to useResumableSSE to subscribe to existing stream instead of starting new
+        resumeStreamId: streamStatus.streamId,
+      } as TSubmission & { resumeStreamId: string };
+      setSubmission(submission);
+    }
+  }, [
+    conversationId,
+    resumableEnabled,
+    currentSubmission,
+    isSuccess,
+    streamStatus,
+    getMessages,
+    setSubmission,
+  ]);

-  // Reset hasResumedRef when conversation changes
+  // Reset processedConvoRef when conversation changes to a different one
   useEffect(() => {
-    if (conversationId !== hasResumedRef.current) {
-      hasResumedRef.current = null;
+    if (conversationId && conversationId !== processedConvoRef.current) {
+      // Only reset if we're navigating to a DIFFERENT conversation
+      // This allows re-checking when navigating back
+      processedConvoRef.current = null;
     }
   }, [conversationId]);
 }


@@ -177,6 +177,7 @@ class GenerationJobManagerClass {
     );

     // Create a final event for abort so clients can properly handle UI cleanup
+    const userMessageId = job.metadata.userMessage?.messageId;
     const abortFinalEvent = {
       final: true,
       conversation: {
@@ -185,18 +186,23 @@ class GenerationJobManagerClass {
       title: 'New Chat',
       requestMessage: job.metadata.userMessage
         ? {
-            messageId: job.metadata.userMessage.messageId,
+            messageId: userMessageId,
+            parentMessageId: job.metadata.userMessage.parentMessageId,
             conversationId: job.metadata.conversationId,
             text: job.metadata.userMessage.text ?? '',
+            isCreatedByUser: true,
           }
         : null,
       responseMessage: {
-        messageId:
-          job.metadata.responseMessageId ?? `${job.metadata.userMessage?.messageId ?? 'aborted'}_`,
+        messageId: job.metadata.responseMessageId ?? `${userMessageId ?? 'aborted'}_`,
+        parentMessageId: userMessageId, // Link response to user message
         conversationId: job.metadata.conversationId,
         content: job.aggregatedContent ?? [],
+        sender: job.metadata.sender ?? 'AI',
         unfinished: true,
-        error: true,
+        /** Not an error - the job was intentionally aborted */
+        error: false,
+        isCreatedByUser: false,
       },
       aborted: true,
     } as unknown as ServerSentEvent;
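
For context, the final event delivered to SSE subscribers after an abort now looks roughly like this (a sketch with placeholder values, derived from the fields above):

// Approximate shape of the `done` event emitted on abort (placeholder values).
export const abortDoneEvent = {
  final: true,
  aborted: true,
  conversation: { conversationId: 'convo-123' },
  requestMessage: {
    messageId: 'user-msg-1',
    parentMessageId: 'parent-msg-0',
    conversationId: 'convo-123',
    text: 'original prompt',
    isCreatedByUser: true,
  },
  responseMessage: {
    messageId: 'response-msg-1',
    parentMessageId: 'user-msg-1', // linked to the user message
    conversationId: 'convo-123',
    content: [{ type: 'text', text: 'partial output before the abort' }],
    sender: 'AI',
    unfinished: true,
    error: false, // intentionally aborted, not an error
    isCreatedByUser: false,
  },
};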


@@ -9,6 +9,8 @@ export interface GenerationJobMetadata {
   userMessage?: Agents.UserMessageMeta;
   /** Response message ID for tracking */
   responseMessageId?: string;
+  /** Sender label for the response (e.g., "GPT-4.1", "Claude") */
+  sender?: string;
 }

 export type GenerationJobStatus = 'running' | 'complete' | 'error' | 'aborted';