Mirror of https://github.com/danny-avila/LibreChat.git (synced 2026-02-20 09:24:10 +01:00)
🔧 fix: Clean empty strings from model_parameters for Agents/OpenAI (#11248)
Parent: 9845b3148e
Commit: 24e8a258cd

4 changed files with 170 additions and 1 deletion
@@ -109,6 +109,10 @@ const createAgentHandler = async (req, res) => {
     const validatedData = agentCreateSchema.parse(req.body);
     const { tools = [], ...agentData } = removeNullishValues(validatedData);
 
+    if (agentData.model_parameters && typeof agentData.model_parameters === 'object') {
+      agentData.model_parameters = removeNullishValues(agentData.model_parameters, true);
+    }
+
     const { id: userId } = req.user;
 
     agentData.id = `agent_${nanoid()}`;
@@ -259,6 +263,11 @@ const updateAgentHandler = async (req, res) => {
     // Preserve explicit null for avatar to allow resetting the avatar
     const { avatar: avatarField, _id, ...rest } = validatedData;
     const updateData = removeNullishValues(rest);
+
+    if (updateData.model_parameters && typeof updateData.model_parameters === 'object') {
+      updateData.model_parameters = removeNullishValues(updateData.model_parameters, true);
+    }
+
     if (avatarField === null) {
       updateData.avatar = avatarField;
     }
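Both handlers rely on the second argument of removeNullishValues to drop empty strings in addition to null/undefined from model_parameters. The helper itself is imported, not part of this diff; the sketch below is a hypothetical re-implementation of the semantics the new tests exercise (nullish and empty-string values removed, 0 and false preserved), written in TypeScript for illustration only.

// Assumed semantics of removeNullishValues(obj, removeEmptyStrings) as used above.
// Hypothetical re-implementation; the real helper is imported, not defined in this commit.
function removeNullishValues(
  obj: Record<string, unknown>,
  removeEmptyStrings = false,
): Record<string, unknown> {
  const result: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(obj)) {
    if (value === null || value === undefined) {
      continue; // nullish values are always dropped
    }
    if (removeEmptyStrings && value === '') {
      continue; // cleared form fields arrive as '' and should not reach the API
    }
    result[key] = value; // 0, false, and other falsy-but-meaningful values are kept
  }
  return result;
}

// Example: a cleared max_tokens field is dropped, topP: 0 is preserved.
removeNullishValues({ temperature: 0.7, max_tokens: '', topP: 0 }, true);
// => { temperature: 0.7, topP: 0 }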
@@ -357,6 +357,46 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
     });
   });
 
+  test('should remove empty strings from model_parameters (Issue Fix)', async () => {
+    // This tests the fix for empty strings being sent to the API instead of being omitted
+    // When a user clears a numeric field (like max_tokens), it should be removed, not sent as ""
+    const dataWithEmptyModelParams = {
+      provider: 'azureOpenAI',
+      model: 'gpt-4',
+      name: 'Agent with Empty Model Params',
+      model_parameters: {
+        temperature: 0.7, // Valid number - should be preserved
+        max_tokens: '', // Empty string - should be removed
+        maxContextTokens: '', // Empty string - should be removed
+        topP: 0, // Zero value - should be preserved (not treated as empty)
+        frequency_penalty: '', // Empty string - should be removed
+      },
+    };
+
+    mockReq.body = dataWithEmptyModelParams;
+
+    await createAgentHandler(mockReq, mockRes);
+
+    expect(mockRes.status).toHaveBeenCalledWith(201);
+
+    const createdAgent = mockRes.json.mock.calls[0][0];
+    expect(createdAgent.model_parameters).toBeDefined();
+    // Valid numbers should be preserved
+    expect(createdAgent.model_parameters.temperature).toBe(0.7);
+    expect(createdAgent.model_parameters.topP).toBe(0);
+    // Empty strings should be removed
+    expect(createdAgent.model_parameters.max_tokens).toBeUndefined();
+    expect(createdAgent.model_parameters.maxContextTokens).toBeUndefined();
+    expect(createdAgent.model_parameters.frequency_penalty).toBeUndefined();
+
+    // Verify in database
+    const agentInDb = await Agent.findOne({ id: createdAgent.id });
+    expect(agentInDb.model_parameters.temperature).toBe(0.7);
+    expect(agentInDb.model_parameters.topP).toBe(0);
+    expect(agentInDb.model_parameters.max_tokens).toBeUndefined();
+    expect(agentInDb.model_parameters.maxContextTokens).toBeUndefined();
+  });
+
   test('should handle invalid avatar format', async () => {
     const dataWithInvalidAvatar = {
       provider: 'openai',
@@ -539,6 +579,49 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
     expect(updatedAgent.tool_resources.invalid_tool).toBeUndefined();
   });
 
+  test('should remove empty strings from model_parameters during update (Issue Fix)', async () => {
+    // First create an agent with valid model_parameters
+    await Agent.updateOne(
+      { id: existingAgentId },
+      {
+        model_parameters: {
+          temperature: 0.5,
+          max_tokens: 1000,
+          maxContextTokens: 2000,
+        },
+      },
+    );
+
+    mockReq.user.id = existingAgentAuthorId.toString();
+    mockReq.params.id = existingAgentId;
+    // Simulate user clearing the fields (sends empty strings)
+    mockReq.body = {
+      model_parameters: {
+        temperature: 0.7, // Change to new value
+        max_tokens: '', // Clear this field (should be removed, not sent as "")
+        maxContextTokens: '', // Clear this field (should be removed, not sent as "")
+      },
+    };
+
+    await updateAgentHandler(mockReq, mockRes);
+
+    expect(mockRes.json).toHaveBeenCalled();
+
+    const updatedAgent = mockRes.json.mock.calls[0][0];
+    expect(updatedAgent.model_parameters).toBeDefined();
+    // Valid number should be updated
+    expect(updatedAgent.model_parameters.temperature).toBe(0.7);
+    // Empty strings should be removed, not sent as ""
+    expect(updatedAgent.model_parameters.max_tokens).toBeUndefined();
+    expect(updatedAgent.model_parameters.maxContextTokens).toBeUndefined();
+
+    // Verify in database
+    const agentInDb = await Agent.findOne({ id: existingAgentId });
+    expect(agentInDb.model_parameters.temperature).toBe(0.7);
+    expect(agentInDb.model_parameters.max_tokens).toBeUndefined();
+    expect(agentInDb.model_parameters.maxContextTokens).toBeUndefined();
+  });
+
   test('should return 404 for non-existent agent', async () => {
     mockReq.user.id = existingAgentAuthorId.toString();
     mockReq.params.id = `agent_${uuidv4()}`; // Non-existent ID
@@ -56,6 +56,77 @@ describe('getOpenAILLMConfig', () => {
     });
   });
 
+  describe('Empty String Handling (Issue Fix)', () => {
+    it('should remove empty string values for numeric parameters', () => {
+      const result = getOpenAILLMConfig({
+        apiKey: 'test-api-key',
+        streaming: true,
+        modelOptions: {
+          model: 'gpt-4',
+          temperature: '' as unknown as number,
+          topP: '' as unknown as number,
+          max_tokens: '' as unknown as number,
+        },
+      });
+
+      expect(result.llmConfig).not.toHaveProperty('temperature');
+      expect(result.llmConfig).not.toHaveProperty('topP');
+      expect(result.llmConfig).not.toHaveProperty('maxTokens');
+      expect(result.llmConfig).not.toHaveProperty('max_tokens');
+    });
+
+    it('should remove empty string values for frequency and presence penalties', () => {
+      const result = getOpenAILLMConfig({
+        apiKey: 'test-api-key',
+        streaming: true,
+        modelOptions: {
+          model: 'gpt-4',
+          frequency_penalty: '' as unknown as number,
+          presence_penalty: '' as unknown as number,
+        },
+      });
+
+      expect(result.llmConfig).not.toHaveProperty('frequencyPenalty');
+      expect(result.llmConfig).not.toHaveProperty('presencePenalty');
+      expect(result.llmConfig).not.toHaveProperty('frequency_penalty');
+      expect(result.llmConfig).not.toHaveProperty('presence_penalty');
+    });
+
+    it('should preserve valid numeric values while removing empty strings', () => {
+      const result = getOpenAILLMConfig({
+        apiKey: 'test-api-key',
+        streaming: true,
+        modelOptions: {
+          model: 'gpt-4',
+          temperature: 0.7,
+          topP: '' as unknown as number,
+          max_tokens: 4096,
+        },
+      });
+
+      expect(result.llmConfig).toHaveProperty('temperature', 0.7);
+      expect(result.llmConfig).not.toHaveProperty('topP');
+      expect(result.llmConfig).toHaveProperty('maxTokens', 4096);
+    });
+
+    it('should preserve zero values (not treat them as empty)', () => {
+      const result = getOpenAILLMConfig({
+        apiKey: 'test-api-key',
+        streaming: true,
+        modelOptions: {
+          model: 'gpt-4',
+          temperature: 0,
+          frequency_penalty: 0,
+          presence_penalty: 0,
+        },
+      });
+
+      expect(result.llmConfig).toHaveProperty('temperature', 0);
+      expect(result.llmConfig).toHaveProperty('frequencyPenalty', 0);
+      expect(result.llmConfig).toHaveProperty('presencePenalty', 0);
+    });
+  });
+
   describe('OpenAI Reasoning Models (o1/o3/gpt-5)', () => {
     const reasoningModels = [
       'o1',
@@ -139,6 +139,12 @@ export function getOpenAILLMConfig({
 }): Pick<t.LLMConfigResult, 'llmConfig' | 'tools'> & {
   azure?: t.AzureOptions;
 } {
+  /** Clean empty strings from model options (e.g., temperature: "" should be removed) */
+  const cleanedModelOptions = removeNullishValues(
+    _modelOptions,
+    true,
+  ) as Partial<t.OpenAIParameters>;
+
   const {
     reasoning_effort,
     reasoning_summary,
@@ -147,7 +153,7 @@ export function getOpenAILLMConfig({
     frequency_penalty,
     presence_penalty,
     ...modelOptions
-  } = _modelOptions;
+  } = cleanedModelOptions;
 
   const llmConfig = Object.assign(
     {
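Cleaning before the destructure is what keeps empty strings out of the outgoing config: any key whose value was '' no longer exists on cleanedModelOptions, so it is never copied or renamed into llmConfig. Below is a condensed, illustrative sketch of that clean-then-destructure pattern; the function name and field set are invented for the example, and the real getOpenAILLMConfig handles far more options.

// Condensed illustration of the clean-then-destructure pattern used in getOpenAILLMConfig.
// buildSketchConfig and its fields are hypothetical stand-ins, not the library's API.
function buildSketchConfig(_modelOptions: Record<string, unknown>) {
  // Stand-in for removeNullishValues(_modelOptions, true): drop nullish and '' values.
  const cleaned: Record<string, unknown> = Object.fromEntries(
    Object.entries(_modelOptions).filter(
      ([, value]) => value !== null && value !== undefined && value !== '',
    ),
  );

  const { max_tokens, ...rest } = cleaned;
  return {
    ...rest,
    // The renamed key is only set when a real value survived the cleaning step.
    ...(max_tokens !== undefined ? { maxTokens: max_tokens } : {}),
  };
}

// buildSketchConfig({ model: 'gpt-4', temperature: 0.7, max_tokens: '' })
// => { model: 'gpt-4', temperature: 0.7 } (no maxTokens key, matching the tests above)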