📊 refactor: use Parameters from Side Panel for OpenAI, Anthropic, and Custom endpoints (#4092)

* feat: openai parameters

* refactor: anthropic/bedrock params, add preset params for openai, and add azure params

* refactor: use 'compact' schemas for anthropic/openai

* refactor: ensure custom endpoints are properly recognized as valid param endpoints

* refactor: update paramEndpoints check in BaseClient.js

* chore: optimize logging by omitting modelsConfig

* refactor: update label casing in baseDefinitions combobox items

* fix: remove 'stop' model options when using o1 series models

* refactor(AnthropicClient): remove default `stop` value

* refactor: reset params on parameters change

* refactor: remove unused default parameter value map introduced in prior commit

* fix: 'min' typo for 'max' value

* refactor: preset settings

* refactor: replace dropdown for image detail with slider; remove `preventDelayedUpdate` condition from DynamicSlider

* fix: localizations for freq./pres. penalty

* Refactor maxOutputTokens to use coerceNumber in tConversationSchema

* refactor(AnthropicClient): use `getModelMaxOutputTokens`
Danny Avila 2024-09-17 22:25:54 -04:00 committed by GitHub
parent ebdbfe8427
commit 8dc5b320bc
20 changed files with 575 additions and 1103 deletions


@@ -720,11 +720,6 @@ export const modularEndpoints = new Set<EModelEndpoint | string>([
   EModelEndpoint.bedrock,
 ]);
 
-export const paramEndpoints = new Set<EModelEndpoint | string>([
-  EModelEndpoint.agents,
-  EModelEndpoint.bedrock,
-]);
-
 export const supportsBalanceCheck = {
   [EModelEndpoint.custom]: true,
   [EModelEndpoint.openAI]: true,


@@ -13,13 +13,11 @@ import {
   gptPluginsSchema,
   // agentsSchema,
   compactAgentsSchema,
-  compactOpenAISchema,
   compactGoogleSchema,
   compactChatGPTSchema,
   chatGPTBrowserSchema,
   compactPluginsSchema,
   compactAssistantSchema,
-  compactAnthropicSchema,
 } from './schemas';
 import { bedrockInputSchema } from './bedrock';
 import { alternateName } from './config';
@@ -302,20 +300,20 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
 };
 
 type CompactEndpointSchema =
-  | typeof compactOpenAISchema
+  | typeof openAISchema
   | typeof compactAssistantSchema
   | typeof compactAgentsSchema
   | typeof compactGoogleSchema
   | typeof bingAISchema
-  | typeof compactAnthropicSchema
+  | typeof anthropicSchema
   | typeof compactChatGPTSchema
   | typeof bedrockInputSchema
   | typeof compactPluginsSchema;
 
 const compactEndpointSchemas: Record<string, CompactEndpointSchema> = {
-  [EModelEndpoint.openAI]: compactOpenAISchema,
-  [EModelEndpoint.azureOpenAI]: compactOpenAISchema,
-  [EModelEndpoint.custom]: compactOpenAISchema,
+  [EModelEndpoint.openAI]: openAISchema,
+  [EModelEndpoint.azureOpenAI]: openAISchema,
+  [EModelEndpoint.custom]: openAISchema,
   [EModelEndpoint.assistants]: compactAssistantSchema,
   [EModelEndpoint.azureAssistants]: compactAssistantSchema,
   [EModelEndpoint.agents]: compactAgentsSchema,
@@ -323,7 +321,7 @@ const compactEndpointSchemas: Record<string, CompactEndpointSchema> = {
   [EModelEndpoint.bedrock]: bedrockInputSchema,
   /* BingAI needs all fields */
   [EModelEndpoint.bingAI]: bingAISchema,
-  [EModelEndpoint.anthropic]: compactAnthropicSchema,
+  [EModelEndpoint.anthropic]: anthropicSchema,
   [EModelEndpoint.chatGPTBrowser]: compactChatGPTSchema,
   [EModelEndpoint.gptPlugins]: compactPluginsSchema,
 };
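
For orientation, this map drives conversation parsing: a schema is looked up by endpoint (or by endpointType for user-defined endpoints) before a conversation is serialized. A minimal sketch of that lookup; the helper below is illustrative, not part of this diff:

// Illustrative only: how a caller resolves the schema before
// serializing a conversation (custom endpoints pass endpointType).
const getCompactSchema = (endpoint: string, endpointType?: string) =>
  compactEndpointSchemas[endpointType ?? endpoint];

// OpenAI, Azure OpenAI, and custom endpoints now share one schema:
getCompactSchema(EModelEndpoint.custom) === getCompactSchema(EModelEndpoint.openAI); // true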


@@ -28,6 +28,14 @@ export enum EModelEndpoint {
   bedrock = 'bedrock',
 }
 
+export const paramEndpoints = new Set<EModelEndpoint | string>([
+  EModelEndpoint.agents,
+  EModelEndpoint.bedrock,
+  EModelEndpoint.openAI,
+  EModelEndpoint.anthropic,
+  EModelEndpoint.custom,
+]);
+
 export enum BedrockProviders {
   AI21 = 'ai21',
   Amazon = 'amazon',
@@ -72,6 +80,21 @@ export const isAgentsEndpoint = (_endpoint?: EModelEndpoint.agents | null | stri
   return endpoint === EModelEndpoint.agents;
 };
 
+export const isParamEndpoint = (
+  endpoint: EModelEndpoint | string,
+  endpointType?: EModelEndpoint | string,
+): boolean => {
+  if (paramEndpoints.has(endpoint)) {
+    return true;
+  }
+
+  if (endpointType != null) {
+    return paramEndpoints.has(endpointType);
+  }
+
+  return false;
+};
+
 export enum ImageDetail {
   low = 'low',
   auto = 'auto',
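
The fallback to endpointType is what lets user-defined endpoints qualify: a custom endpoint's name is arbitrary, but its endpointType is EModelEndpoint.custom. A quick illustration (the custom endpoint name is made up):

import { EModelEndpoint, isParamEndpoint } from 'librechat-data-provider';

isParamEndpoint(EModelEndpoint.openAI); // true
isParamEndpoint(EModelEndpoint.google); // false: google is not in paramEndpoints
// A user-defined endpoint matches via its endpointType:
isParamEndpoint('My OpenRouter Proxy', EModelEndpoint.custom); // true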
@@ -500,7 +523,7 @@ export const tConversationSchema = z.object({
   frequency_penalty: z.number().optional(),
   presence_penalty: z.number().optional(),
   parentMessageId: z.string().optional(),
-  maxOutputTokens: z.number().optional(),
+  maxOutputTokens: coerceNumber.optional(),
   maxContextTokens: coerceNumber.optional(),
   max_tokens: coerceNumber.optional(),
   /* Anthropic */
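
Switching maxOutputTokens from z.number() to coerceNumber means string values coming from form inputs or saved presets parse cleanly instead of failing validation. A minimal sketch, assuming coerceNumber behaves like zod's built-in coercion:

import { z } from 'zod';

// Assumption: coerceNumber is roughly equivalent to this.
const coerceNumber = z.coerce.number();

coerceNumber.optional().parse('4096');    // => 4096 (number)
coerceNumber.optional().parse(4096);      // => 4096
coerceNumber.optional().parse(undefined); // => undefined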
@@ -630,71 +653,6 @@ export const tConversationTagSchema = z.object({
 });
 
 export type TConversationTag = z.infer<typeof tConversationTagSchema>;
 
-export const openAISchema = tConversationSchema
-  .pick({
-    model: true,
-    modelLabel: true,
-    chatGptLabel: true,
-    promptPrefix: true,
-    temperature: true,
-    top_p: true,
-    presence_penalty: true,
-    frequency_penalty: true,
-    resendFiles: true,
-    artifacts: true,
-    imageDetail: true,
-    stop: true,
-    iconURL: true,
-    greeting: true,
-    spec: true,
-    maxContextTokens: true,
-    max_tokens: true,
-  })
-  .transform((obj) => {
-    const result = {
-      ...obj,
-      model: obj.model ?? openAISettings.model.default,
-      chatGptLabel: obj.chatGptLabel ?? obj.modelLabel ?? null,
-      promptPrefix: obj.promptPrefix ?? null,
-      temperature: obj.temperature ?? openAISettings.temperature.default,
-      top_p: obj.top_p ?? openAISettings.top_p.default,
-      presence_penalty: obj.presence_penalty ?? openAISettings.presence_penalty.default,
-      frequency_penalty: obj.frequency_penalty ?? openAISettings.frequency_penalty.default,
-      resendFiles:
-        typeof obj.resendFiles === 'boolean' ? obj.resendFiles : openAISettings.resendFiles.default,
-      imageDetail: obj.imageDetail ?? openAISettings.imageDetail.default,
-      stop: obj.stop ?? undefined,
-      iconURL: obj.iconURL ?? undefined,
-      greeting: obj.greeting ?? undefined,
-      spec: obj.spec ?? undefined,
-      maxContextTokens: obj.maxContextTokens ?? undefined,
-      max_tokens: obj.max_tokens ?? undefined,
-    };
-
-    if (obj.modelLabel != null && obj.modelLabel !== '') {
-      result.modelLabel = null;
-    }
-
-    return result;
-  })
-  .catch(() => ({
-    model: openAISettings.model.default,
-    chatGptLabel: null,
-    promptPrefix: null,
-    temperature: openAISettings.temperature.default,
-    top_p: openAISettings.top_p.default,
-    presence_penalty: openAISettings.presence_penalty.default,
-    frequency_penalty: openAISettings.frequency_penalty.default,
-    resendFiles: openAISettings.resendFiles.default,
-    imageDetail: openAISettings.imageDetail.default,
-    stop: undefined,
-    iconURL: undefined,
-    greeting: undefined,
-    spec: undefined,
-    maxContextTokens: undefined,
-    max_tokens: undefined,
-  }));
-
 export const googleSchema = tConversationSchema
   .pick({
     model: true,
@@ -778,64 +736,6 @@ export const bingAISchema = tConversationSchema
     invocationId: 1,
   }));
 
-export const anthropicSchema = tConversationSchema
-  .pick({
-    model: true,
-    modelLabel: true,
-    promptPrefix: true,
-    temperature: true,
-    maxOutputTokens: true,
-    topP: true,
-    topK: true,
-    resendFiles: true,
-    promptCache: true,
-    artifacts: true,
-    iconURL: true,
-    greeting: true,
-    spec: true,
-    maxContextTokens: true,
-  })
-  .transform((obj) => {
-    const model = obj.model ?? anthropicSettings.model.default;
-    return {
-      ...obj,
-      model,
-      modelLabel: obj.modelLabel ?? null,
-      promptPrefix: obj.promptPrefix ?? null,
-      temperature: obj.temperature ?? anthropicSettings.temperature.default,
-      maxOutputTokens: obj.maxOutputTokens ?? anthropicSettings.maxOutputTokens.reset(model),
-      topP: obj.topP ?? anthropicSettings.topP.default,
-      topK: obj.topK ?? anthropicSettings.topK.default,
-      promptCache:
-        typeof obj.promptCache === 'boolean'
-          ? obj.promptCache
-          : anthropicSettings.promptCache.default,
-      resendFiles:
-        typeof obj.resendFiles === 'boolean'
-          ? obj.resendFiles
-          : anthropicSettings.resendFiles.default,
-      iconURL: obj.iconURL ?? undefined,
-      greeting: obj.greeting ?? undefined,
-      spec: obj.spec ?? undefined,
-      maxContextTokens: obj.maxContextTokens ?? anthropicSettings.maxContextTokens.default,
-    };
-  })
-  .catch(() => ({
-    model: anthropicSettings.model.default,
-    modelLabel: null,
-    promptPrefix: null,
-    temperature: anthropicSettings.temperature.default,
-    maxOutputTokens: anthropicSettings.maxOutputTokens.default,
-    topP: anthropicSettings.topP.default,
-    topK: anthropicSettings.topK.default,
-    resendFiles: anthropicSettings.resendFiles.default,
-    promptCache: anthropicSettings.promptCache.default,
-    iconURL: undefined,
-    greeting: undefined,
-    spec: undefined,
-    maxContextTokens: anthropicSettings.maxContextTokens.default,
-  }));
-
 export const chatGPTBrowserSchema = tConversationSchema
   .pick({
     model: true,
@@ -1027,7 +927,7 @@ export const agentsSchema = tConversationSchema
     maxContextTokens: undefined,
   }));
 
-export const compactOpenAISchema = tConversationSchema
+export const openAISchema = tConversationSchema
   .pick({
     model: true,
     chatGptLabel: true,
@@ -1046,29 +946,7 @@ export const compactOpenAISchema = tConversationSchema
     maxContextTokens: true,
     max_tokens: true,
   })
-  .transform((obj: Partial<TConversation>) => {
-    const newObj: Partial<TConversation> = { ...obj };
-    if (newObj.temperature === openAISettings.temperature.default) {
-      delete newObj.temperature;
-    }
-    if (newObj.top_p === openAISettings.top_p.default) {
-      delete newObj.top_p;
-    }
-    if (newObj.presence_penalty === openAISettings.presence_penalty.default) {
-      delete newObj.presence_penalty;
-    }
-    if (newObj.frequency_penalty === openAISettings.frequency_penalty.default) {
-      delete newObj.frequency_penalty;
-    }
-    if (newObj.resendFiles === openAISettings.resendFiles.default) {
-      delete newObj.resendFiles;
-    }
-    if (newObj.imageDetail === openAISettings.imageDetail.default) {
-      delete newObj.imageDetail;
-    }
-    return removeNullishValues(newObj);
-  })
+  .transform((obj: Partial<TConversation>) => removeNullishValues(obj))
   .catch(() => ({}));
 
 export const compactGoogleSchema = tConversationSchema
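
This is the substantive change behind the rename: the old compact transform deleted any value equal to the endpoint default, while the new one only drops nullish fields, so an explicitly chosen value that happens to equal a default still round-trips. A sketch of the effect, with removeNullishValues re-implemented inline for illustration:

// Minimal re-implementation of removeNullishValues, for illustration only:
const removeNullishValues = (obj: Record<string, unknown>) =>
  Object.fromEntries(Object.entries(obj).filter(([, v]) => v != null));

removeNullishValues({ model: 'gpt-4o', temperature: 1, promptPrefix: null });
// => { model: 'gpt-4o', temperature: 1 }
// Under the old transform, temperature would also have been deleted
// whenever it equaled openAISettings.temperature.default.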
@@ -1106,7 +984,7 @@ export const compactGoogleSchema = tConversationSchema
   })
   .catch(() => ({}));
 
-export const compactAnthropicSchema = tConversationSchema
+export const anthropicSchema = tConversationSchema
   .pick({
     model: true,
     modelLabel: true,
@@ -1123,29 +1001,7 @@ export const compactAnthropicSchema = tConversationSchema
     spec: true,
     maxContextTokens: true,
   })
-  .transform((obj) => {
-    const newObj: Partial<TConversation> = { ...obj };
-    if (newObj.temperature === anthropicSettings.temperature.default) {
-      delete newObj.temperature;
-    }
-    if (newObj.maxOutputTokens === anthropicSettings.legacy.maxOutputTokens.default) {
-      delete newObj.maxOutputTokens;
-    }
-    if (newObj.topP === anthropicSettings.topP.default) {
-      delete newObj.topP;
-    }
-    if (newObj.topK === anthropicSettings.topK.default) {
-      delete newObj.topK;
-    }
-    if (newObj.resendFiles === anthropicSettings.resendFiles.default) {
-      delete newObj.resendFiles;
-    }
-    if (newObj.promptCache === anthropicSettings.promptCache.default) {
-      delete newObj.promptCache;
-    }
-    return removeNullishValues(newObj);
-  })
+  .transform((obj) => removeNullishValues(obj))
   .catch(() => ({}));
 
 export const compactChatGPTSchema = tConversationSchema
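
With defaulting removed from the schema (per the getModelMaxOutputTokens bullet, the client now derives limits itself), the renamed anthropicSchema only normalizes shape. Assuming coerceNumber coerces numeric strings as in the tConversationSchema hunk above, parsing looks roughly like this (values illustrative):

anthropicSchema.parse({
  model: 'claude-3-5-sonnet-20240620',
  maxOutputTokens: '8192', // string from a form input, coerced by coerceNumber
  topP: undefined,         // dropped by removeNullishValues
});
// => { model: 'claude-3-5-sonnet-20240620', maxOutputTokens: 8192 }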