import { logger } from '@librechat/data-schemas';
import type { Agents } from 'librechat-data-provider';
import type { StandardGraph } from '@librechat/agents';
import type { SerializableJobData } from './interfaces/IJobStore';
import type * as t from '~/types';
import { InMemoryEventTransport } from './implementations/InMemoryEventTransport';
import { InMemoryContentState } from './implementations/InMemoryContentState';
import { InMemoryJobStore } from './implementations/InMemoryJobStore';
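
/*
 * Usage sketch (illustrative only, not part of this module): a producer route
 * creates a job and waits for the first SSE subscriber before streaming, while
 * a consumer route subscribes and forwards events. `res` and `sendEvent` are
 * hypothetical Express/SSE helpers on the caller's side, and `chunkEvent` /
 * `finalEvent` stand in for real ServerSentEvent payloads.
 *
 *   const job = GenerationJobManager.createJob(streamId, userId, conversationId);
 *   GenerationJobManager.subscribe(
 *     streamId,
 *     (chunk) => sendEvent(res, chunk),
 *     (final) => sendEvent(res, final),
 *   );
 *   await job.readyPromise; // resolves once the first subscriber attaches
 *   GenerationJobManager.emitChunk(streamId, chunkEvent);
 *   GenerationJobManager.emitDone(streamId, finalEvent);
 *   await GenerationJobManager.completeJob(streamId);
 */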

/**
 * Runtime state for active jobs - not serializable, kept in-memory per instance.
 * Contains AbortController, ready promise, and other non-serializable state.
 */
interface RuntimeJobState {
  abortController: AbortController;
  readyPromise: Promise<void>;
  resolveReady: () => void;
  finalEvent?: t.ServerSentEvent;
  syncSent: boolean;
}
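
/*
 * The split between SerializableJobData and RuntimeJobState matters because job
 * metadata can round-trip through JSON (and therefore a shared store), while
 * nothing above survives serialization. A rough illustration:
 *
 *   JSON.stringify({ abortController: new AbortController() });
 *   // => '{"abortController":{}}' - the signal and abort() are lost
 */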

/**
 * Manages generation jobs for resumable LLM streams.
 * Composes three implementations for clean separation of concerns:
 * - InMemoryJobStore: Serializable job metadata (swappable for Redis)
 * - InMemoryEventTransport: Pub/sub events (swappable for Redis Pub/Sub)
 * - InMemoryContentState: Volatile content refs with WeakRef (always in-memory)
 */
class GenerationJobManagerClass {
  private jobStore: InMemoryJobStore;
  private eventTransport: InMemoryEventTransport;
  private contentState: InMemoryContentState;

  /** Runtime state - always in-memory, not serializable */
  private runtimeState = new Map<string, RuntimeJobState>();

  private cleanupInterval: NodeJS.Timeout | null = null;

  constructor() {
    this.jobStore = new InMemoryJobStore({ ttlAfterComplete: 300000, maxJobs: 1000 });
    this.eventTransport = new InMemoryEventTransport();
    this.contentState = new InMemoryContentState();
  }
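
  /*
   * A sketch (not implemented here) of what swapping the store for a shared
   * backend might look like; `RedisJobStore` is hypothetical and would need to
   * expose the same surface as InMemoryJobStore, including the synchronous
   * getters (getJobSync, hasJobSync) this class calls:
   *
   *   this.jobStore = new RedisJobStore({ ttlAfterComplete: 300000, maxJobs: 1000 });
   */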

  /**
   * Initialize the job manager with periodic cleanup.
   */
  initialize(): void {
    if (this.cleanupInterval) {
      return;
    }

    this.jobStore.initialize();

    this.cleanupInterval = setInterval(() => {
      this.cleanup();
    }, 60000);

    if (this.cleanupInterval.unref) {
      this.cleanupInterval.unref();
    }

    logger.debug('[GenerationJobManager] Initialized');
  }

  /**
   * Create a new generation job.
   * @returns A facade object compatible with the old GenerationJob interface
   */
  createJob(streamId: string, userId: string, conversationId?: string): t.GenerationJob {
    // Create serializable job data (sync for in-memory)
    const jobData = this.jobStore.createJobSync(streamId, userId, conversationId);

    // Create runtime state
    let resolveReady: () => void;
    const readyPromise = new Promise<void>((resolve) => {
      resolveReady = resolve;
    });

    const runtime: RuntimeJobState = {
      abortController: new AbortController(),
      readyPromise,
      resolveReady: resolveReady!,
      syncSent: false,
    };
    this.runtimeState.set(streamId, runtime);

    // Set up all-subscribers-left callback
    this.eventTransport.onAllSubscribersLeft(streamId, () => {
      const currentRuntime = this.runtimeState.get(streamId);
      if (currentRuntime) {
        currentRuntime.syncSent = false;
      }
      const content = this.contentState.getContentParts(streamId) ?? [];
      this.eventTransport.emitChunk(streamId, {
        _internal: 'allSubscribersLeft',
        content,
      });
      logger.debug(`[GenerationJobManager] All subscribers left ${streamId}, reset syncSent`);
    });

    logger.debug(`[GenerationJobManager] Created job: ${streamId}`);

    // Return facade for backwards compatibility
    return this.buildJobFacade(streamId, jobData, runtime);
  }
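
  /*
   * Note on the callback above: "allSubscribersLeft" travels over the regular
   * chunk channel tagged with `_internal` rather than using a separate emitter,
   * so a transport only needs one publish path per stream; subscribe() filters
   * the marker out before chunks reach real clients.
   */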

  /**
   * Build a GenerationJob facade from job data and runtime state.
   * This maintains backwards compatibility with existing code.
   */
  private buildJobFacade(
    streamId: string,
    jobData: SerializableJobData,
    runtime: RuntimeJobState,
  ): t.GenerationJob {
    // Create a proxy emitter that delegates to eventTransport
    const emitterProxy = {
      on: (event: string, handler: (...args: unknown[]) => void) => {
        if (event === 'allSubscribersLeft') {
          // Subscribe to internal event
          this.eventTransport.subscribe(streamId, {
            onChunk: (e) => {
              const evt = e as Record<string, unknown>;
              if (evt._internal === 'allSubscribersLeft') {
                handler(evt.content);
              }
            },
          });
        }
      },
      emit: () => {
        /* handled via eventTransport */
      },
      listenerCount: () => this.eventTransport.getSubscriberCount(streamId),
      setMaxListeners: () => {
        /* no-op for proxy */
      },
      removeAllListeners: () => this.eventTransport.cleanup(streamId),
      off: () => {
        /* handled via unsubscribe */
      },
    };

    return {
      streamId,
      emitter: emitterProxy as unknown as t.GenerationJob['emitter'],
      status: jobData.status as t.GenerationJobStatus,
      createdAt: jobData.createdAt,
      completedAt: jobData.completedAt,
      abortController: runtime.abortController,
      error: jobData.error,
      metadata: {
        userId: jobData.userId,
        conversationId: jobData.conversationId,
        userMessage: jobData.userMessage,
        responseMessageId: jobData.responseMessageId,
        sender: jobData.sender,
      },
      readyPromise: runtime.readyPromise,
      resolveReady: runtime.resolveReady,
      finalEvent: runtime.finalEvent,
      syncSent: runtime.syncSent,
    };
  }

  /**
   * Get a job by streamId.
   */
  getJob(streamId: string): t.GenerationJob | undefined {
    const jobData = this.jobStore.getJobSync(streamId);
    const runtime = this.runtimeState.get(streamId);
    if (!jobData || !runtime) {
      return undefined;
    }
    return this.buildJobFacade(streamId, jobData, runtime);
  }

  /**
   * Find an active job by conversationId.
   */
  getJobByConversation(conversationId: string): t.GenerationJob | undefined {
    const jobData = this.jobStore.getJobByConversationSync(conversationId);
    if (!jobData) {
      return undefined;
    }
    const runtime = this.runtimeState.get(jobData.streamId);
    if (!runtime) {
      return undefined;
    }
    return this.buildJobFacade(jobData.streamId, jobData, runtime);
  }

  /**
   * Check if a job exists.
   */
  hasJob(streamId: string): boolean {
    return this.jobStore.hasJobSync(streamId);
  }

  /**
   * Get job status.
   */
  getJobStatus(streamId: string): t.GenerationJobStatus | undefined {
    const jobData = this.jobStore.getJobSync(streamId);
    return jobData?.status as t.GenerationJobStatus | undefined;
  }

  /**
   * Mark job as complete.
   */
  async completeJob(streamId: string, error?: string): Promise<void> {
    await this.jobStore.updateJob(streamId, {
      status: error ? 'error' : 'complete',
      completedAt: Date.now(),
      error,
    });

    // Clear content state
    this.contentState.clearContentState(streamId);

    logger.debug(`[GenerationJobManager] Job completed: ${streamId}`);
  }

  /**
   * Abort a job (user-initiated).
   */
  async abortJob(streamId: string): Promise<void> {
    const jobData = this.jobStore.getJobSync(streamId);
    const runtime = this.runtimeState.get(streamId);

    if (!jobData) {
      logger.warn(`[GenerationJobManager] Cannot abort - job not found: ${streamId}`);
      return;
    }

    if (runtime) {
      runtime.abortController.abort();
    }

    await this.jobStore.updateJob(streamId, {
      status: 'aborted',
      completedAt: Date.now(),
    });

    // Create final event for abort
    const userMessageId = jobData.userMessage?.messageId;
    const content = this.contentState.getContentParts(streamId) ?? [];

    const abortFinalEvent: t.ServerSentEvent = {
      final: true,
      conversation: { conversationId: jobData.conversationId },
      title: 'New Chat',
      requestMessage: jobData.userMessage
        ? {
            messageId: userMessageId,
            parentMessageId: jobData.userMessage.parentMessageId,
            conversationId: jobData.conversationId,
            text: jobData.userMessage.text ?? '',
            isCreatedByUser: true,
          }
        : null,
      responseMessage: {
        messageId: jobData.responseMessageId ?? `${userMessageId ?? 'aborted'}_`,
        parentMessageId: userMessageId,
        conversationId: jobData.conversationId,
        content,
        sender: jobData.sender ?? 'AI',
        unfinished: true,
        error: false,
        isCreatedByUser: false,
      },
      aborted: true,
    } as unknown as t.ServerSentEvent;

    if (runtime) {
      runtime.finalEvent = abortFinalEvent;
    }

    this.eventTransport.emitDone(streamId, abortFinalEvent);
    this.contentState.clearContentState(streamId);

    logger.debug(`[GenerationJobManager] Job aborted: ${streamId}`);
  }

  /**
   * Subscribe to a job's event stream.
   */
  subscribe(
    streamId: string,
    onChunk: t.ChunkHandler,
    onDone?: t.DoneHandler,
    onError?: t.ErrorHandler,
  ): { unsubscribe: t.UnsubscribeFn } | null {
    const runtime = this.runtimeState.get(streamId);
    if (!runtime) {
      return null;
    }

    const jobData = this.jobStore.getJobSync(streamId);

    // If job already complete, send final event
    setImmediate(() => {
      if (
        runtime.finalEvent &&
        jobData &&
        ['complete', 'error', 'aborted'].includes(jobData.status)
      ) {
        onDone?.(runtime.finalEvent);
      }
    });

    const subscription = this.eventTransport.subscribe(streamId, {
      onChunk: (event) => {
        const e = event as t.ServerSentEvent;
        // Filter out internal events
        if (!(e as Record<string, unknown>)._internal) {
          onChunk(e);
        }
      },
      onDone: (event) => onDone?.(event as t.ServerSentEvent),
      onError,
    });

    // Signal ready on first subscriber
    if (this.eventTransport.isFirstSubscriber(streamId)) {
      runtime.resolveReady();
      logger.debug(`[GenerationJobManager] First subscriber ready for ${streamId}`);
    }

    return subscription;
  }
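
  /*
   * Reconnect sketch (illustrative): a client that lost its SSE connection can
   * replay accumulated state first, then re-attach for live events.
   * `replayToClient` is a hypothetical helper on the caller's side.
   *
   *   const resume = GenerationJobManager.getResumeState(streamId);
   *   if (resume) {
   *     replayToClient(resume.aggregatedContent, resume.runSteps);
   *   }
   *   const sub = GenerationJobManager.subscribe(streamId, onChunk, onDone, onError);
   *   // subscribe() returns null when no runtime state exists for the streamId
   *   // (e.g. after cleanup() evicted the job), so callers should treat null
   *   // as "nothing left to stream".
   */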

  /**
   * Emit a chunk event to all subscribers.
   */
  emitChunk(streamId: string, event: t.ServerSentEvent): void {
    const jobData = this.jobStore.getJobSync(streamId);
    if (!jobData || jobData.status !== 'running') {
      return;
    }

    // Track user message from created event
    this.trackUserMessage(streamId, event);

    this.eventTransport.emitChunk(streamId, event);
  }

  /**
   * Track user message from created event.
   */
  private trackUserMessage(streamId: string, event: t.ServerSentEvent): void {
    const data = event as Record<string, unknown>;
    if (!data.created || !data.message) {
      return;
    }

    const message = data.message as Record<string, unknown>;
    const updates: Partial<SerializableJobData> = {
      userMessage: {
        messageId: message.messageId as string,
        parentMessageId: message.parentMessageId as string | undefined,
        conversationId: message.conversationId as string | undefined,
        text: message.text as string | undefined,
      },
    };

    if (message.conversationId) {
      updates.conversationId = message.conversationId as string;
    }

    this.jobStore.updateJob(streamId, updates);
    logger.debug(`[GenerationJobManager] Tracked user message for ${streamId}`);
  }

  /**
   * Update job metadata.
   */
  updateMetadata(streamId: string, metadata: Partial<t.GenerationJobMetadata>): void {
    const updates: Partial<SerializableJobData> = {};
    if (metadata.responseMessageId) {
      updates.responseMessageId = metadata.responseMessageId;
    }
    if (metadata.sender) {
      updates.sender = metadata.sender;
    }
    if (metadata.conversationId) {
      updates.conversationId = metadata.conversationId;
    }
    if (metadata.userMessage) {
      updates.userMessage = metadata.userMessage;
    }
    this.jobStore.updateJob(streamId, updates);
    logger.debug(`[GenerationJobManager] Updated metadata for ${streamId}`);
  }

  /**
   * Set reference to the graph's contentParts array.
   */
  setContentParts(streamId: string, contentParts: Agents.MessageContentComplex[]): void {
    if (!this.jobStore.hasJobSync(streamId)) {
      return;
    }
    this.contentState.setContentParts(streamId, contentParts);
    logger.debug(`[GenerationJobManager] Set contentParts for ${streamId}`);
  }

  /**
   * Set reference to the graph instance.
   */
  setGraph(streamId: string, graph: StandardGraph): void {
    if (!this.jobStore.hasJobSync(streamId)) {
      return;
    }
    this.contentState.setGraph(streamId, graph);
    logger.debug(`[GenerationJobManager] Set graph reference for ${streamId}`);
  }

  /**
   * Get resume state for reconnecting clients.
   */
  getResumeState(streamId: string): t.ResumeState | null {
    const jobData = this.jobStore.getJobSync(streamId);
    if (!jobData) {
      return null;
    }

    const aggregatedContent = this.contentState.getContentParts(streamId) ?? [];
    const runSteps = this.contentState.getRunSteps(streamId);

    logger.debug(`[GenerationJobManager] getResumeState:`, {
      streamId,
      aggregatedContentLength: aggregatedContent.length,
      runStepsLength: runSteps.length,
    });

    return {
      runSteps,
      aggregatedContent,
      userMessage: jobData.userMessage,
      responseMessageId: jobData.responseMessageId,
      conversationId: jobData.conversationId,
      sender: jobData.sender,
    };
  }

  /**
   * Mark that sync has been sent.
   */
  markSyncSent(streamId: string): void {
    const runtime = this.runtimeState.get(streamId);
    if (runtime) {
      runtime.syncSent = true;
    }
  }

  /**
   * Check if sync has been sent.
   */
  wasSyncSent(streamId: string): boolean {
    return this.runtimeState.get(streamId)?.syncSent ?? false;
  }
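
  /*
   * Sync-flag sketch (illustrative): callers can use the pair above to send an
   * initial catch-up payload exactly once per connected audience; the flag is
   * reset in createJob()'s onAllSubscribersLeft callback when every subscriber
   * disconnects. `sendEvent` and the payload shape below are hypothetical.
   *
   *   if (!GenerationJobManager.wasSyncSent(streamId)) {
   *     sendEvent(res, { sync: true, ...GenerationJobManager.getResumeState(streamId) });
   *     GenerationJobManager.markSyncSent(streamId);
   *   }
   */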

  /**
   * Emit a done event.
   */
  emitDone(streamId: string, event: t.ServerSentEvent): void {
    const runtime = this.runtimeState.get(streamId);
    if (runtime) {
      runtime.finalEvent = event;
    }
    this.eventTransport.emitDone(streamId, event);
  }

  /**
   * Emit an error event.
   */
  emitError(streamId: string, error: string): void {
    this.eventTransport.emitError(streamId, error);
  }

  /**
   * Cleanup expired jobs.
   */
  private async cleanup(): Promise<void> {
    const count = await this.jobStore.cleanup();

    // Cleanup runtime state for deleted jobs
    for (const streamId of this.runtimeState.keys()) {
      if (!this.jobStore.hasJobSync(streamId)) {
        this.runtimeState.delete(streamId);
        this.contentState.clearContentState(streamId);
        this.eventTransport.cleanup(streamId);
      }
    }

    if (count > 0) {
      logger.debug(`[GenerationJobManager] Cleaned up ${count} expired jobs`);
    }
  }

  /**
   * Get stream info for status endpoint.
   */
  getStreamInfo(streamId: string): {
    active: boolean;
    status: t.GenerationJobStatus;
    aggregatedContent?: Agents.MessageContentComplex[];
    createdAt: number;
  } | null {
    const jobData = this.jobStore.getJobSync(streamId);
    if (!jobData) {
      return null;
    }

    return {
      active: jobData.status === 'running',
      status: jobData.status as t.GenerationJobStatus,
      aggregatedContent: this.contentState.getContentParts(streamId) ?? [],
      createdAt: jobData.createdAt,
    };
  }
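
  /*
   * Status endpoint sketch (illustrative): an HTTP handler could expose this as
   * a lightweight polling target; `req` and `res` are hypothetical Express
   * objects on the caller's side.
   *
   *   const info = GenerationJobManager.getStreamInfo(req.params.streamId);
   *   res.json(info ?? { active: false });
   */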

  /**
   * Get total job count.
   */
  getJobCount(): number {
    return this.jobStore.getJobCount();
  }

  /**
   * Get job count by status.
   */
  getJobCountByStatus(): Record<t.GenerationJobStatus, number> {
    return this.jobStore.getJobCountByStatus() as Record<t.GenerationJobStatus, number>;
  }

  /**
   * Destroy the manager.
   */
  destroy(): void {
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
      this.cleanupInterval = null;
    }

    this.jobStore.destroy();
    this.eventTransport.destroy();
    this.contentState.destroy();
    this.runtimeState.clear();

    logger.debug('[GenerationJobManager] Destroyed');
  }
}

export const GenerationJobManager = new GenerationJobManagerClass();
export { GenerationJobManagerClass };
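
/*
 * Lifecycle sketch (illustrative): the singleton would typically be initialized
 * once at server startup and torn down on shutdown; the separate class export
 * makes it possible to construct isolated instances (e.g. in tests).
 *
 *   GenerationJobManager.initialize();
 *   process.on('SIGTERM', () => GenerationJobManager.destroy());
 */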