Mirror of https://github.com/danny-avila/LibreChat.git, synced 2026-01-05 18:18:51 +01:00
⌚ fix: Debounce setUserContext and Default State Param for OpenID Auth (#7559)
* fix: Add default random state parameter to OpenID auth request for providers that require it; ensure passport strategy uses it
* ⌚ refactor: debounce setUserContext to avoid a race condition (see the debounce sketch after this list)
* refactor: Update OpenID authentication to use randomState from openid-client (see the state-parameter sketch after this list)
* chore: linting in presetSettings type definition
* chore: import order in ModelPanel
* refactor: remove the `isLegacyOutput` property from AnthropicClient, since it is only used where it is defined; add the latest models to the non-legacy patterns and remove the property from client cleanup
* refactor: adjust grid layout in Parameters component for improved responsiveness
* refactor: adjust grid layout in ModelPanel for improved display of model parameters
* test: add cases for maxOutputTokens handling in Claude 4 Sonnet and Opus models
* ci: mock loadCustomConfig in server tests and refactor OpenID route for improved authentication handling
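
For the state-parameter change, the following is a minimal sketch assuming openid-client v6 (ESM), which exports randomState(). The issuer URL, client id, and redirect URI are placeholders, not LibreChat's actual route code; the point is that a random `state` value is generated by default and sent with the authorization request so providers that require it always receive one.

// Sketch only: assumes openid-client v6; endpoint, client id, and redirect URI are placeholders.
import * as client from 'openid-client';

const state = client.randomState(); // unguessable default `state` value

const authorizationUrl = new URL('https://issuer.example.com/authorize');
authorizationUrl.searchParams.set('response_type', 'code');
authorizationUrl.searchParams.set('scope', 'openid profile email');
authorizationUrl.searchParams.set('client_id', 'my-client-id');
authorizationUrl.searchParams.set('redirect_uri', 'https://app.example.com/oauth/openid/callback');
authorizationUrl.searchParams.set('state', state);

// Store `state` (e.g. in the session) so the passport strategy can compare it
// against the value the provider echoes back on the callback.
console.log(authorizationUrl.toString());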
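
The debounce change can be illustrated with a minimal sketch. This is not LibreChat's implementation: `setUserContext` is a stand-in name and the 50 ms wait is an arbitrary illustrative value. The idea is that rapid repeated calls collapse into a single trailing call, so two near-simultaneous auth callbacks cannot interleave their writes.

// Minimal debounce sketch (illustrative only; names and the 50 ms wait are hypothetical).
function debounce(fn, waitMs) {
  let timer = null;
  return function debounced(...args) {
    if (timer) {
      clearTimeout(timer);
    }
    timer = setTimeout(() => {
      timer = null;
      fn.apply(this, args);
    }, waitMs);
  };
}

// Only the last call of a burst runs, roughly 50 ms after the burst ends.
const setUserContextDebounced = debounce((user) => {
  console.log('setting user context for', user.id); // persist/cache the user context here
}, 50);

setUserContextDebounced({ id: 'abc' });
setUserContextDebounced({ id: 'abc' }); // the earlier call is cancelled; this one runs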
This commit is contained in:
parent deb8a00e27
commit c68cc0a550
10 changed files with 90 additions and 48 deletions
@@ -74,9 +74,6 @@ class AnthropicClient extends BaseClient {
     /** Whether to use Messages API or Completions API
      * @type {boolean} */
     this.useMessages;
-    /** Whether or not the model is limited to the legacy amount of output tokens
-     * @type {boolean} */
-    this.isLegacyOutput;
     /** Whether or not the model supports Prompt Caching
      * @type {boolean} */
     this.supportsCacheControl;
@@ -118,13 +115,16 @@ class AnthropicClient extends BaseClient {
     const modelMatch = matchModelName(this.modelOptions.model, EModelEndpoint.anthropic);
     this.isClaudeLatest =
       /claude-[3-9]/.test(modelMatch) || /claude-(?:sonnet|opus|haiku)-[4-9]/.test(modelMatch);
-    this.isLegacyOutput = !(
-      /claude-3[-.]5-sonnet/.test(modelMatch) || /claude-3[-.]7/.test(modelMatch)
+    const isLegacyOutput = !(
+      /claude-3[-.]5-sonnet/.test(modelMatch) ||
+      /claude-3[-.]7/.test(modelMatch) ||
+      /claude-(?:sonnet|opus|haiku)-[4-9]/.test(modelMatch) ||
+      /claude-[4-9]/.test(modelMatch)
     );
     this.supportsCacheControl = this.options.promptCache && checkPromptCacheSupport(modelMatch);
 
     if (
-      this.isLegacyOutput &&
+      isLegacyOutput &&
       this.modelOptions.maxOutputTokens &&
       this.modelOptions.maxOutputTokens > legacy.maxOutputTokens.default
     ) {
@@ -514,6 +514,34 @@ describe('AnthropicClient', () => {
       expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
     });
+
+    it('should not cap maxOutputTokens for Claude 4 Sonnet models', () => {
+      const client = new AnthropicClient('test-api-key');
+      const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 10; // 40,960 tokens
+
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-sonnet-4-20250514',
+          maxOutputTokens: highTokenValue,
+        },
+      });
+
+      expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
+    });
+
+    it('should not cap maxOutputTokens for Claude 4 Opus models', () => {
+      const client = new AnthropicClient('test-api-key');
+      const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 6; // 24,576 tokens (under 32K limit)
+
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-opus-4-20250514',
+          maxOutputTokens: highTokenValue,
+        },
+      });
+
+      expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
+    });
+
+    it('should cap maxOutputTokens for Claude 3.5 Haiku models', () => {
+      const client = new AnthropicClient('test-api-key');
+      const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 2;