From 8a9e92e882a4c554bb74091be875aeee22511fd1 Mon Sep 17 00:00:00 2001 From: rtula Date: Fri, 27 Feb 2026 15:45:34 +0530 Subject: [PATCH] * **Support OpenAI Playground Stored Prompts (stored_prompt_id):** Adds a stored_prompt_id parameter to the OpenAI endpoint, allowing users to supply an OpenAI Playground prompt ID (pmpt_...) from the LibreChat UI. When set, the conversation is automatically routed through the Responses API with the stored prompt's system instructions and vector store applied. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **Changes:** * **Backend (llm.ts):** Injects modelKwargs.prompt = { id: stored_prompt_id } into the raw API call and forces useResponsesApi: true when the field is set. * **Schema (schemas.ts):** Adds stored_prompt_id to tConversationSchema, tQueryParamsSchema.pick, and openAIBaseSchema.pick so the field is validated and survives Zod serialization for presets and conversations. * **Parameter Settings (data-provider/parameterSettings.ts):** Registers a full-width text input in the OpenAI parameters panel and preset editor. * **Translations (en/translation.json):** Adds label, placeholder, and tooltip description keys. * **Data Schemas (data-schemas):** Adds stored_prompt_id to IConversation, IPreset, and the Mongoose schema so the value is persisted in MongoDB. * **Tests (llm.spec.ts):** 4 unit tests covering: prompt injection, no-op when absent, coexistence with useResponsesApi, and no field leakage into llmConfig. **Usage:** In the Parameters panel for any OpenAI endpoint, enter a stored prompt ID (e.g. pmpt_abc123). The Responses API is enabled automatically — no other configuration needed. Save it as a preset to reuse across conversations. 
--- client/src/locales/en/translation.json | 3 + packages/api/src/endpoints/openai/llm.spec.ts | 62 +++++++++++++++++++ packages/api/src/endpoints/openai/llm.ts | 16 +++++ .../data-provider/src/parameterSettings.ts | 16 +++++ packages/data-provider/src/schemas.ts | 11 ++++ packages/data-schemas/src/schema/defaults.ts | 4 ++ packages/data-schemas/src/schema/preset.ts | 1 + packages/data-schemas/src/types/convo.ts | 1 + 8 files changed, 114 insertions(+) diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index e0dad68431..c4acb9cb7a 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -349,6 +349,9 @@ "com_endpoint_top_p": "Top P", "com_endpoint_use_active_assistant": "Use Active Assistant", "com_endpoint_use_responses_api": "Use Responses API", + "com_endpoint_stored_prompt_id": "Stored Prompt ID", + "com_endpoint_stored_prompt_id_placeholder": "pmpt_...", + "com_endpoint_openai_stored_prompt_id": "OpenAI Playground stored prompt ID (pmpt_...). Routes the conversation through a pre-configured prompt that carries its own system instructions and file-search vector store. Automatically enables the Responses API.", "com_endpoint_use_search_grounding": "Grounding with Google Search", "com_endpoint_verbosity": "Verbosity", "com_error_endpoint_models_not_loaded": "Models for {{0}} could not be loaded. 
Please refresh the page and try again.", diff --git a/packages/api/src/endpoints/openai/llm.spec.ts b/packages/api/src/endpoints/openai/llm.spec.ts index 8e92332e24..c1c386d980 100644 --- a/packages/api/src/endpoints/openai/llm.spec.ts +++ b/packages/api/src/endpoints/openai/llm.spec.ts @@ -649,6 +649,68 @@ describe('extractDefaultParams', () => { }); }); +describe('Stored Playground Prompt (stored_prompt_id)', () => { + it('should inject prompt.id into modelKwargs and force useResponsesApi when stored_prompt_id is set', () => { + const result = getOpenAILLMConfig({ + apiKey: 'test-api-key', + streaming: true, + modelOptions: { + model: 'gpt-4o', + stored_prompt_id: 'pmpt_abc123', + } as Partial, + }); + + expect(result.llmConfig).toHaveProperty('useResponsesApi', true); + expect(result.llmConfig.modelKwargs).toEqual( + expect.objectContaining({ prompt: { id: 'pmpt_abc123' } }), + ); + }); + + it('should not set modelKwargs.prompt when stored_prompt_id is absent', () => { + const result = getOpenAILLMConfig({ + apiKey: 'test-api-key', + streaming: true, + modelOptions: { + model: 'gpt-4o', + useResponsesApi: true, + } as Partial, + }); + + expect(result.llmConfig.modelKwargs?.prompt).toBeUndefined(); + }); + + it('should preserve a manually-set useResponsesApi=true alongside stored_prompt_id', () => { + const result = getOpenAILLMConfig({ + apiKey: 'test-api-key', + streaming: true, + modelOptions: { + model: 'gpt-4o', + useResponsesApi: true, + stored_prompt_id: 'pmpt_xyz789', + } as Partial, + }); + + expect(result.llmConfig).toHaveProperty('useResponsesApi', true); + expect(result.llmConfig.modelKwargs).toEqual( + expect.objectContaining({ prompt: { id: 'pmpt_xyz789' } }), + ); + }); + + it('should not add stored_prompt_id itself to llmConfig or modelKwargs as a raw field', () => { + const result = getOpenAILLMConfig({ + apiKey: 'test-api-key', + streaming: true, + modelOptions: { + model: 'gpt-4o', + stored_prompt_id: 'pmpt_abc123', + } as Partial, + }); + + 
expect(result.llmConfig).not.toHaveProperty('stored_prompt_id'); + expect(result.llmConfig.modelKwargs?.stored_prompt_id).toBeUndefined(); + }); +}); + describe('applyDefaultParams', () => { it('should apply defaults only when field is undefined', () => { const target: Record = { diff --git a/packages/api/src/endpoints/openai/llm.ts b/packages/api/src/endpoints/openai/llm.ts index f25971735c..e4323f6578 100644 --- a/packages/api/src/endpoints/openai/llm.ts +++ b/packages/api/src/endpoints/openai/llm.ts @@ -53,6 +53,7 @@ export const knownOpenAIParams = new Set([ 'truncation', 'include', 'previous_response_id', + 'stored_prompt_id', // LangChain specific '__includeRawResponse', 'maxConcurrency', @@ -150,6 +151,7 @@ export function getOpenAILLMConfig({ reasoning_summary, verbosity, web_search, + stored_prompt_id, frequency_penalty, presence_penalty, ...modelOptions @@ -264,6 +266,20 @@ export function getOpenAILLMConfig({ tools.push({ type: 'web_search' }); } + if (stored_prompt_id) { + /** + * Forward a stored OpenAI Playground prompt by ID. + * The Responses API accepts `prompt: { id, variables? }` which causes OpenAI + * to hydrate the stored system instructions, file-search vector store, and + * any tool definitions configured in the Playground. + * We force `useResponsesApi` here because Chat Completions does not support + * this parameter. 
+ */ + llmConfig.useResponsesApi = true; + modelKwargs.prompt = { id: stored_prompt_id }; + hasModelKwargs = true; + } + /** * Note: OpenAI reasoning models (o1/o3/gpt-5) do not support temperature and other sampling parameters * Exception: gpt-5-chat and versioned models like gpt-5.1 DO support these parameters diff --git a/packages/data-provider/src/parameterSettings.ts b/packages/data-provider/src/parameterSettings.ts index 0796efe773..61ce25841a 100644 --- a/packages/data-provider/src/parameterSettings.ts +++ b/packages/data-provider/src/parameterSettings.ts @@ -267,6 +267,20 @@ const openAIParams: Record = { showDefault: false, columnSpan: 2, }, + stored_prompt_id: { + key: 'stored_prompt_id', + label: 'com_endpoint_stored_prompt_id', + labelCode: true, + type: 'string', + default: '', + component: 'input', + placeholder: 'com_endpoint_stored_prompt_id_placeholder', + placeholderCode: true, + description: 'com_endpoint_openai_stored_prompt_id', + descriptionCode: true, + optionType: 'model', + columnSpan: 4, + }, web_search: { key: 'web_search', label: 'com_ui_web_search', @@ -712,6 +726,7 @@ const openAI: SettingsConfiguration = [ openAIParams.web_search, openAIParams.reasoning_effort, openAIParams.useResponsesApi, + openAIParams.stored_prompt_id, openAIParams.reasoning_summary, openAIParams.verbosity, openAIParams.disableStreaming, @@ -738,6 +753,7 @@ const openAICol2: SettingsConfiguration = [ openAIParams.reasoning_summary, openAIParams.verbosity, openAIParams.useResponsesApi, + openAIParams.stored_prompt_id, openAIParams.web_search, openAIParams.disableStreaming, librechat.fileTokenLimit, diff --git a/packages/data-provider/src/schemas.ts b/packages/data-provider/src/schemas.ts index 90d5362273..f26cc571f0 100644 --- a/packages/data-provider/src/schemas.ts +++ b/packages/data-provider/src/schemas.ts @@ -738,6 +738,14 @@ export const tConversationSchema = z.object({ verbosity: eVerbositySchema.optional().nullable(), /* OpenAI: use Responses API */ 
useResponsesApi: z.boolean().optional(), + /** + * OpenAI Playground stored prompt ID (e.g. "pmpt_..."). + * When set, LibreChat forwards `prompt: { id }` in the Responses API request, + * routing the call through a pre-configured Playground prompt that can carry + * its own system instructions, file-search vector store, and tool definitions. + * Implicitly enables `useResponsesApi`. + */ + stored_prompt_id: z.string().optional(), /* Anthropic: Effort control */ effort: eAnthropicEffortSchema.optional().nullable(), /* OpenAI Responses API / Anthropic API / Google API */ @@ -848,6 +856,8 @@ export const tQueryParamsSchema = tConversationSchema verbosity: true, /** @endpoints openAI, custom, azureOpenAI */ useResponsesApi: true, + /** @endpoints openAI, custom, azureOpenAI */ + stored_prompt_id: true, /** @endpoints openAI, anthropic, google */ web_search: true, /** @endpoints openAI, custom, azureOpenAI */ @@ -1119,6 +1129,7 @@ export const openAIBaseSchema = tConversationSchema.pick({ reasoning_summary: true, verbosity: true, useResponsesApi: true, + stored_prompt_id: true, web_search: true, disableStreaming: true, fileTokenLimit: true, diff --git a/packages/data-schemas/src/schema/defaults.ts b/packages/data-schemas/src/schema/defaults.ts index 33af668384..4bad49ca87 100644 --- a/packages/data-schemas/src/schema/defaults.ts +++ b/packages/data-schemas/src/schema/defaults.ts @@ -136,6 +136,10 @@ export const conversationPreset = { useResponsesApi: { type: Boolean, }, + /** OpenAI Playground stored prompt ID (pmpt_...) 
*/ + stored_prompt_id: { + type: String, + }, /** OpenAI Responses API / Anthropic API / Google API */ web_search: { type: Boolean, diff --git a/packages/data-schemas/src/schema/preset.ts b/packages/data-schemas/src/schema/preset.ts index fc23d86c0b..cb00bb0661 100644 --- a/packages/data-schemas/src/schema/preset.ts +++ b/packages/data-schemas/src/schema/preset.ts @@ -50,6 +50,7 @@ export interface IPreset extends Document { reasoning_summary?: string; verbosity?: string; useResponsesApi?: boolean; + stored_prompt_id?: string; web_search?: boolean; disableStreaming?: boolean; fileTokenLimit?: number; diff --git a/packages/data-schemas/src/types/convo.ts b/packages/data-schemas/src/types/convo.ts index 43965a5827..eaa9993f4c 100644 --- a/packages/data-schemas/src/types/convo.ts +++ b/packages/data-schemas/src/types/convo.ts @@ -48,6 +48,7 @@ export interface IConversation extends Document { reasoning_summary?: string; verbosity?: string; useResponsesApi?: boolean; + stored_prompt_id?: string; web_search?: boolean; disableStreaming?: boolean; fileTokenLimit?: number;