Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-12-24 04:10:15 +01:00
📋 feat: Log Custom Config File and Add Known Model Limits to Custom Endpoint (#1657)
* refactor(custom): add all recognized models to maxTokensMap for custom endpoint
* feat(librechat.yaml): log the custom config file on initial load
* fix(OpenAIClient): pass endpointType/endpoint to `getModelMaxTokens` call
Parent: c470147ea2
Commit: f7f7f929a0
3 changed files with 27 additions and 21 deletions
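The feat(librechat.yaml) item logs the parsed custom config once when it is first loaded. A minimal sketch of that idea, assuming a generic js-yaml loader and console logging rather than LibreChat's actual config module, paths, or logger:

```js
// Hypothetical sketch: read, parse, and log the custom config on startup.
// The real loader in LibreChat has its own path resolution, schema validation, and logger.
const fs = require('fs');
const yaml = require('js-yaml');

function loadCustomConfig(configPath = './librechat.yaml') {
  if (!fs.existsSync(configPath)) {
    return null; // no custom config provided
  }
  const customConfig = yaml.load(fs.readFileSync(configPath, 'utf8'));

  // Log the config on initial load so misconfigured custom endpoints are easy to spot.
  console.info(`Custom config file loaded from ${configPath}:`);
  console.info(JSON.stringify(customConfig, null, 2));

  return customConfig;
}
```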
@@ -131,7 +131,8 @@ class OpenAIClient extends BaseClient {
     const { isChatGptModel } = this;
     this.isUnofficialChatGptModel =
       model.startsWith('text-chat') || model.startsWith('text-davinci-002-render');
-    this.maxContextTokens = getModelMaxTokens(model) ?? 4095; // 1 less than maximum
+    this.maxContextTokens =
+      getModelMaxTokens(model, this.options.endpointType ?? this.options.endpoint) ?? 4095; // 1 less than maximum

     if (this.shouldSummarize) {
       this.maxContextTokens = Math.floor(this.maxContextTokens / 2);
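The hunk above threads the endpoint through to `getModelMaxTokens`, falling back from `endpointType` to `endpoint`, so custom endpoints can resolve known context limits instead of always landing on the 4095 default. A minimal sketch of how such a per-endpoint lookup could work, assuming a `maxTokensMap` keyed by endpoint and then by model name or prefix; the map contents and matching rules here are illustrative, not LibreChat's actual table:

```js
// Illustrative per-endpoint token-limit map; real keys and values may differ.
const maxTokensMap = {
  openAI: {
    'gpt-3.5-turbo': 4095,
    'gpt-4': 8191,
  },
  custom: {
    // The commit adds all recognized models for the custom endpoint;
    // only a few entries are shown here.
    'gpt-3.5-turbo': 4095,
    'gpt-4': 8191,
    'mistral-': 31999,
  },
};

function getModelMaxTokens(modelName, endpoint = 'openAI') {
  const endpointMap = maxTokensMap[endpoint];
  if (!endpointMap) {
    return undefined;
  }
  // Exact match first, then the longest key that prefixes the model name.
  if (endpointMap[modelName] !== undefined) {
    return endpointMap[modelName];
  }
  const match = Object.keys(endpointMap)
    .filter((key) => modelName.startsWith(key))
    .sort((a, b) => b.length - a.length)[0];
  return match ? endpointMap[match] : undefined;
}

// For a user-defined provider, endpointType identifies the generic handler
// ('custom') while endpoint carries the user-chosen name; the ?? chain in the
// diff prefers endpointType when it is set.
const options = { endpoint: 'Mistral', endpointType: 'custom' };
const maxContextTokens =
  getModelMaxTokens('mistral-7b-instruct', options.endpointType ?? options.endpoint) ?? 4095;
// -> 31999 in this sketch; unrecognized models still fall back to 4095.
```

Prefix matching keeps dated or fine-tuned variants (e.g. gpt-4-0613) mapped to the same limit without listing every release individually.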