🔧 Fix: Resolve Anthropic Client Issues 🧠 (#1226)

* fix: correct preset title for Anthropic endpoint

* fix(Settings/Anthropic): show correct default value for LLM temperature

* fix(AnthropicClient): use `getModelMaxTokens` to get the correct LLM max context tokens, correctly set default temperature to 1, use only 2 params for class constructor, use `getResponseSender` to add correct sender to response message
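A minimal sketch of the intent behind this change, not the actual `AnthropicClient` code: the default model, the fallback context size, and the argument shape of `getResponseSender` below are assumptions for illustration.

```js
// Sketch only; getModelMaxTokens and getResponseSender are the repo helpers
// named above and are assumed to be in scope here.
class AnthropicClient {
  // Two-parameter constructor: the API key plus a single options object.
  constructor(apiKey, options = {}) {
    this.apiKey = apiKey;
    this.modelOptions = {
      model: options.modelOptions?.model ?? 'claude-2', // assumed default model
      // Anthropic's documented default temperature is 1.
      temperature: options.modelOptions?.temperature ?? 1,
    };
    // Derive the context window from the model name instead of hard-coding it.
    this.maxContextTokens = getModelMaxTokens(this.modelOptions.model) ?? 100000;
    // Label the response message with the endpoint-appropriate sender.
    this.sender = getResponseSender({
      endpoint: 'anthropic',
      modelLabel: options.modelLabel,
    });
  }
}
```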

* refactor(/api/ask|edit/anthropic): save messages to database after the final response is sent to the client, and do not save conversation from route controller
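In rough shape, the reordered controller looks like the sketch below, assuming an Express-style handler; `sendFinalMessage` and `saveMessage` are placeholder names, not the repo's exact API.

```js
// Placeholder handler illustrating only the new ordering of respond-then-save.
async function askAnthropic(req, res, client) {
  const { text, conversationId, parentMessageId } = req.body;

  // Generate the reply as before.
  const responseMessage = await client.sendMessage(text, { conversationId, parentMessageId });

  // 1) Finish the HTTP response to the user first...
  sendFinalMessage(res, { responseMessage });

  // 2) ...then persist the message, so a slow or failing database write cannot
  //    delay or break the reply. The conversation document itself is no longer
  //    saved here in the route controller.
  await saveMessage({ ...responseMessage, user: req.user.id });
}
```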

* fix(initializeClient/anthropic): correctly pass client options (endpointOption) to class initialization
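The shape of the fix, as a sketch: forward `endpointOption` into the constructor so the options actually reach the client. The environment-variable name and return shape are assumptions.

```js
// Sketch only; mirrors the two-argument constructor sketched above.
const initializeClient = ({ req, res, endpointOption }) => {
  const apiKey = process.env.ANTHROPIC_API_KEY; // assumed key source
  // Spread the endpoint options so model, temperature, etc. reach the client.
  const client = new AnthropicClient(apiKey, { req, res, ...endpointOption });
  return { client, anthropicApiKey: apiKey };
};
```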

* feat(ModelService/Anthropic): add claude-1.2
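Roughly, the default Anthropic model list gains the new entry; the surrounding structure below is illustrative rather than the file's exact contents (the model names match the test added in this commit).

```js
const anthropicModels = [
  'claude-2.1',
  'claude-2',
  'claude-1.2', // newly added
  'claude-1',
  'claude-1-100k',
  'claude-instant-1',
  'claude-instant-1-100k',
];
```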
Danny Avila committed on 2023-11-26 14:44:57 -05:00 via GitHub
parent 4b289640f2
commit d7ef4590ea
9 changed files with 73 additions and 45 deletions

@@ -62,6 +62,25 @@ describe('getModelMaxTokens', () => {
    expect(getModelMaxTokens('gpt-4-1106-preview')).toBe(maxTokensMap['gpt-4-1106']);
    expect(getModelMaxTokens('gpt-4-1106-vision-preview')).toBe(maxTokensMap['gpt-4-1106']);
  });

  test('should return correct tokens for Anthropic models', () => {
    const models = [
      'claude-2.1',
      'claude-2',
      'claude-1.2',
      'claude-1',
      'claude-1-100k',
      'claude-instant-1',
      'claude-instant-1-100k',
    ];

    const claude21MaxTokens = maxTokensMap['claude-2.1'];
    const claudeMaxTokens = maxTokensMap['claude-'];
    models.forEach((model) => {
      const expectedTokens = model === 'claude-2.1' ? claude21MaxTokens : claudeMaxTokens;
      expect(getModelMaxTokens(model)).toEqual(expectedTokens);
    });
  });
});
describe('matchModelName', () => {