🤖 refactor: Remove Default Model Params for All Endpoints (#3682)

* refactor: use parseCompactConvo in buildOptions, and generate no default values for the API to avoid weird model behavior with defaults

* refactor: OTHER - always show cursor when markdown component is empty (preferable to not)

* refactor(OpenAISettings): use config object for setting defaults app-wide

* refactor: Use removeNullishValues in buildOptions for ALL endpoints

* fix: add missing conversationId to title methods for transactions; refactor(GoogleClient): model options, set no default, add todo note for recording token usage

* fix: at minimum set a model default, as is required by API (edge case)
This commit is contained in:
Danny Avila 2024-08-18 06:00:03 -04:00 committed by GitHub
parent d3a20357e9
commit 683702d555
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
19 changed files with 169 additions and 141 deletions

View file

@@ -83,11 +83,13 @@ class AnthropicClient extends BaseClient {
this.options = options;
}
const modelOptions = this.options.modelOptions || {};
this.modelOptions = {
...modelOptions,
model: modelOptions.model || anthropicSettings.model.default,
};
this.modelOptions = Object.assign(
{
model: anthropicSettings.model.default,
},
this.modelOptions,
this.options.modelOptions,
);
const modelMatch = matchModelName(this.modelOptions.model, EModelEndpoint.anthropic);
this.isClaude3 = modelMatch.startsWith('claude-3');

View file

@@ -120,19 +120,7 @@ class GoogleClient extends BaseClient {
.filter((ex) => ex)
.filter((obj) => obj.input.content !== '' && obj.output.content !== '');
const modelOptions = this.options.modelOptions || {};
this.modelOptions = {
...modelOptions,
// set some good defaults (check for undefined in some cases because they may be 0)
model: modelOptions.model || settings.model.default,
temperature:
typeof modelOptions.temperature === 'undefined'
? settings.temperature.default
: modelOptions.temperature,
topP: typeof modelOptions.topP === 'undefined' ? settings.topP.default : modelOptions.topP,
topK: typeof modelOptions.topK === 'undefined' ? settings.topK.default : modelOptions.topK,
// stop: modelOptions.stop // no stop method for now
};
this.modelOptions = this.options.modelOptions || {};
this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));
@@ -808,7 +796,7 @@ class GoogleClient extends BaseClient {
});
reply = titleResponse.content;
// TODO: RECORD TOKEN USAGE
return reply;
}
}

View file

@@ -6,6 +6,7 @@ const {
ImageDetail,
EModelEndpoint,
resolveHeaders,
openAISettings,
ImageDetailCost,
CohereConstants,
getResponseSender,
@@ -85,26 +86,13 @@ class OpenAIClient extends BaseClient {
this.apiKey = this.options.openaiApiKey;
}
const modelOptions = this.options.modelOptions || {};
if (!this.modelOptions) {
this.modelOptions = {
...modelOptions,
model: modelOptions.model || 'gpt-3.5-turbo',
temperature:
typeof modelOptions.temperature === 'undefined' ? 0.8 : modelOptions.temperature,
top_p: typeof modelOptions.top_p === 'undefined' ? 1 : modelOptions.top_p,
presence_penalty:
typeof modelOptions.presence_penalty === 'undefined' ? 1 : modelOptions.presence_penalty,
stop: modelOptions.stop,
};
} else {
// Update the modelOptions if it already exists
this.modelOptions = {
...this.modelOptions,
...modelOptions,
};
}
this.modelOptions = Object.assign(
{
model: openAISettings.model.default,
},
this.modelOptions,
this.options.modelOptions,
);
this.defaultVisionModel = this.options.visionModel ?? 'gpt-4-vision-preview';
if (typeof this.options.attachments?.then === 'function') {

View file

@@ -1,4 +1,4 @@
const { parseConvo, EModelEndpoint } = require('librechat-data-provider');
const { parseCompactConvo, EModelEndpoint } = require('librechat-data-provider');
const { getModelsConfig } = require('~/server/controllers/ModelController');
const azureAssistants = require('~/server/services/Endpoints/azureAssistants');
const assistants = require('~/server/services/Endpoints/assistants');
@@ -24,7 +24,7 @@ const buildFunction = {
async function buildEndpointOption(req, res, next) {
const { endpoint, endpointType } = req.body;
const parsedBody = parseConvo({ endpoint, endpointType, conversation: req.body });
const parsedBody = parseCompactConvo({ endpoint, endpointType, conversation: req.body });
if (req.app.locals.modelSpecs?.list && req.app.locals.modelSpecs?.enforce) {
/** @type {{ list: TModelSpec[] }}*/

View file

@@ -21,7 +21,11 @@ const addTitle = async (req, { text, response, client }) => {
const titleCache = getLogStores(CacheKeys.GEN_TITLE);
const key = `${req.user.id}-${response.conversationId}`;
const title = await client.titleConvo({ text, responseText: response?.text });
const title = await client.titleConvo({
text,
responseText: response?.text,
conversationId: response.conversationId,
});
await titleCache.set(key, title, 120000);
await saveConvo(
req,

View file

@@ -1,15 +1,18 @@
const { removeNullishValues } = require('librechat-data-provider');
const buildOptions = (endpoint, parsedBody) => {
const {
modelLabel,
promptPrefix,
maxContextTokens,
resendFiles,
resendFiles = true,
iconURL,
greeting,
spec,
...rest
...modelOptions
} = parsedBody;
const endpointOption = {
const endpointOption = removeNullishValues({
endpoint,
modelLabel,
promptPrefix,
@@ -18,10 +21,8 @@ const buildOptions = (endpoint, parsedBody) => {
greeting,
spec,
maxContextTokens,
modelOptions: {
...rest,
},
};
modelOptions,
});
return endpointOption;
};

View file

@@ -1,17 +1,17 @@
const { removeNullishValues } = require('librechat-data-provider');
const buildOptions = (endpoint, parsedBody) => {
// eslint-disable-next-line no-unused-vars
const { promptPrefix, assistant_id, iconURL, greeting, spec, ...rest } = parsedBody;
const endpointOption = {
const { promptPrefix, assistant_id, iconURL, greeting, spec, ...modelOptions } = parsedBody;
const endpointOption = removeNullishValues({
endpoint,
promptPrefix,
assistant_id,
iconURL,
greeting,
spec,
modelOptions: {
...rest,
},
};
modelOptions,
});
return endpointOption;
};

View file

@@ -1,17 +1,17 @@
const { removeNullishValues } = require('librechat-data-provider');
const buildOptions = (endpoint, parsedBody) => {
// eslint-disable-next-line no-unused-vars
const { promptPrefix, assistant_id, iconURL, greeting, spec, ...rest } = parsedBody;
const endpointOption = {
const { promptPrefix, assistant_id, iconURL, greeting, spec, ...modelOptions } = parsedBody;
const endpointOption = removeNullishValues({
endpoint,
promptPrefix,
assistant_id,
iconURL,
greeting,
spec,
modelOptions: {
...rest,
},
};
modelOptions,
});
return endpointOption;
};

View file

@@ -1,16 +1,18 @@
const { removeNullishValues } = require('librechat-data-provider');
const buildOptions = (endpoint, parsedBody, endpointType) => {
const {
chatGptLabel,
promptPrefix,
maxContextTokens,
resendFiles,
resendFiles = true,
imageDetail,
iconURL,
greeting,
spec,
...rest
...modelOptions
} = parsedBody;
const endpointOption = {
const endpointOption = removeNullishValues({
endpoint,
endpointType,
chatGptLabel,
@@ -21,10 +23,8 @@ const buildOptions = (endpoint, parsedBody, endpointType) => {
greeting,
spec,
maxContextTokens,
modelOptions: {
...rest,
},
};
modelOptions,
});
return endpointOption;
};

View file

@@ -47,7 +47,11 @@ const addTitle = async (req, { text, response, client }) => {
const titleCache = getLogStores(CacheKeys.GEN_TITLE);
const key = `${req.user.id}-${response.conversationId}`;
const title = await titleClient.titleConvo({ text, responseText: response?.text });
const title = await titleClient.titleConvo({
text,
responseText: response?.text,
conversationId: response.conversationId,
});
await titleCache.set(key, title, 120000);
await saveConvo(
req,

View file

@@ -1,17 +1,27 @@
const { removeNullishValues } = require('librechat-data-provider');
const buildOptions = (endpoint, parsedBody) => {
const { examples, modelLabel, promptPrefix, iconURL, greeting, spec, ...rest } = parsedBody;
const endpointOption = {
const {
examples,
endpoint,
modelLabel,
resendFiles = true,
promptPrefix,
iconURL,
greeting,
spec,
modelOptions: {
...rest,
},
};
...modelOptions
} = parsedBody;
const endpointOption = removeNullishValues({
examples,
endpoint,
modelLabel,
resendFiles,
promptPrefix,
iconURL,
greeting,
spec,
modelOptions,
});
return endpointOption;
};

View file

@@ -1,3 +1,5 @@
const { removeNullishValues } = require('librechat-data-provider');
const buildOptions = (endpoint, parsedBody) => {
const {
chatGptLabel,
@@ -10,7 +12,7 @@ const buildOptions = (endpoint, parsedBody) => {
maxContextTokens,
...modelOptions
} = parsedBody;
const endpointOption = {
const endpointOption = removeNullishValues({
endpoint,
tools:
tools
@@ -24,7 +26,7 @@ const buildOptions = (endpoint, parsedBody) => {
spec,
maxContextTokens,
modelOptions,
};
});
return endpointOption;
};

View file

@@ -21,7 +21,11 @@ const addTitle = async (req, { text, response, client }) => {
const titleCache = getLogStores(CacheKeys.GEN_TITLE);
const key = `${req.user.id}-${response.conversationId}`;
const title = await client.titleConvo({ text, responseText: response?.text });
const title = await client.titleConvo({
text,
responseText: response?.text,
conversationId: response.conversationId,
});
await titleCache.set(key, title, 120000);
await saveConvo(
req,

View file

@@ -1,16 +1,18 @@
const { removeNullishValues } = require('librechat-data-provider');
const buildOptions = (endpoint, parsedBody) => {
const {
chatGptLabel,
promptPrefix,
maxContextTokens,
resendFiles,
resendFiles = true,
imageDetail,
iconURL,
greeting,
spec,
...rest
...modelOptions
} = parsedBody;
const endpointOption = {
const endpointOption = removeNullishValues({
endpoint,
chatGptLabel,
promptPrefix,
@@ -20,10 +22,8 @@ const buildOptions = (endpoint, parsedBody) => {
greeting,
spec,
maxContextTokens,
modelOptions: {
...rest,
},
};
modelOptions,
});
return endpointOption;
};