🤖 feat: Model Specs & Save Tools per Convo/Preset (#2578)

* WIP: first pass ModelSpecs

* refactor(onSelectEndpoint): use `getConvoSwitchLogic`

* feat: introduce iconURL, greeting, frontend fields for conversations/presets/messages

* feat: conversation.iconURL & greeting in Landing

* feat: conversation.iconURL & greeting in New Chat button

* feat: message.iconURL

* refactor: ConversationIcon -> ConvoIconURL

* WIP: add spec as a conversation field

* refactor: useAppStartup, set spec on initial load for new chat, allow undefined spec, add localStorage keys enum, additional type fields for spec

* feat: handle `showIconInMenu`, `showIconInHeader`, undefined `iconURL` and no specs on initial load

* chore: handle undefined or empty modelSpecs

* WIP: first pass, modelSpec schema for custom config

* refactor: move default filtered tools definition to ToolService

* feat: pass modelSpecs from backend via startupConfig

* refactor: modelSpecs config, return and define list

* fix: react error and include iconURL in responseMessage

* refactor: add iconURL to responseMessage only

* refactor: getIconEndpoint

* refactor: pass TSpecsConfig

* fix(assistants): differentiate compactAssistantSchema, correctly resets shared conversation state with other endpoints

* refactor: assistant id prefix localStorage key

* refactor: add more LocalStorageKeys and replace hardcoded values

* feat: prioritize spec on new chat behavior: last selected modelSpec behavior (localStorage)

* feat: first pass, interface config

* chore: WIP, todo: add warnings based on config.modelSpecs settings.

* feat: enforce modelSpecs if configured

* feat: show config file yaml errors

* chore: delete unused legacy Plugins component

* refactor: set tools to localStorage from recoil store

* chore: add stable recoil setter to useEffect deps

* refactor: save tools to conversation documents

* style(MultiSelectPop): dynamic height, remove unused import

* refactor(react-query): use localStorage keys and pass config to useAvailablePluginsQuery

* feat(utils): add mapPlugins

* refactor(Convo): use conversation.tools if defined, lastSelectedTools if not

* refactor: remove unused legacy code using `useSetOptions`, remove conditional flag `isMultiChat` for using legacy settings

* refactor(PluginStoreDialog): add exhaustive-deps which are stable react state setters

* fix(HeaderOptions): pass `popover` as true

* refactor(useSetStorage): use project enums

* refactor: use LocalStorageKeys enum

* fix: prevent setConversation from setting falsy values in lastSelectedTools

* refactor: use map for availableTools state and available Plugins query

* refactor(updateLastSelectedModel): organize logic better and add note on purpose

* fix(setAgentOption): prevent resetting last model to secondary model for gptPlugins

* refactor(buildDefaultConvo): use enum

* refactor: remove `useSetStorage` and consolidate areas where conversation state is saved to localStorage

* fix: conversations retain tools on refresh

* fix(gptPlugins): prevent nullish tools from being saved

* chore: delete useServerStream

* refactor: move initial plugins logic to useAppStartup

* refactor(MultiSelectDropDown): add more pass-in className props

* feat: use tools in presets

* chore: delete unused usePresetOptions

* refactor: new agentOptions default handling

* chore: note

* feat: add label and custom instructions to agents

* chore: remove 'disabled with tools' message

* style: move plugins to 2nd column in parameters

* fix: TPreset type for agentOptions

* fix: interface controls

* refactor: add interfaceConfig, use Separator within Switcher

* refactor: hide Assistants panel if interface.parameters are disabled

* fix(Header): only modelSpecs if list is greater than 0

* refactor: separate MessageIcon logic from useMessageHelpers for better react rule-following

* fix(AppService): don't use reserved keyword 'interface'

* feat: set existing Icon for custom endpoints through iconURL

* fix(ci): tests passing for App Service

* docs: refactor custom_config.md for readability and better organization, also include missing values

* docs: interface section and re-organize docs

* docs: update modelSpecs info

* chore: remove unused files

* chore: remove unused files

* chore: move useSetIndexOptions

* chore: remove unused file

* chore: move useConversation(s)

* chore: move useDefaultConvo

* chore: move useNavigateToConvo

* refactor: use plugin install hook so it can be used elsewhere

* chore: import order

* update docs

* refactor(OpenAI/Plugins): allow modelLabel as an initial value for chatGptLabel

* chore: remove unused EndpointOptionsPopover and hide 'Save as Preset' button if preset UI visibility disabled

* feat(loadDefaultInterface): issue warnings based on values

* feat: changelog for custom config file

* docs: add additional changelog note

* fix: prevent unavailable tool selection from preset and update availableTools on Plugin installations

* feat: add `filteredTools` option in custom config

* chore: changelog

* fix(MessageIcon): always overwrite conversation.iconURL in messageSettings

* fix(ModelSpecsMenu): icon edge cases

* fix(NewChat): dynamic icon

* fix(PluginsClient): always include endpoint in responseMessage

* fix: always include endpoint and iconURL in responseMessage across different response methods

* feat: interchangeable keys for modelSpec enforcing
This commit is contained in:
Danny Avila 2024-04-30 22:11:48 -04:00 committed by GitHub
parent a5cac03fa4
commit 0e50c07e3f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
130 changed files with 3934 additions and 2973 deletions

View file

@ -5,6 +5,14 @@ import type { TFile } from './types/files';
export const isUUID = z.string().uuid();
export enum AuthType {
OVERRIDE_AUTH = 'override_auth',
USER_PROVIDED = 'user_provided',
SYSTEM_DEFINED = 'SYSTEM_DEFINED',
}
export const authTypeSchema = z.nativeEnum(AuthType);
export enum EModelEndpoint {
azureOpenAI = 'azureOpenAI',
openAI = 'openAI',
@ -190,11 +198,37 @@ export const tExampleSchema = z.object({
export type TExample = z.infer<typeof tExampleSchema>;
export enum EAgent {
functions = 'functions',
classic = 'classic',
}
export const agentOptionSettings = {
model: {
default: 'gpt-4-turbo',
},
temperature: {
min: 0,
max: 1,
step: 0.01,
default: 0,
},
agent: {
default: EAgent.functions,
options: [EAgent.functions, EAgent.classic],
},
skipCompletion: {
default: true,
},
};
export const eAgentOptionsSchema = z.nativeEnum(EAgent);
export const tAgentOptionsSchema = z.object({
agent: z.string(),
skipCompletion: z.boolean(),
agent: z.string().default(EAgent.functions),
skipCompletion: z.boolean().default(agentOptionSettings.skipCompletion.default),
model: z.string(),
temperature: z.number(),
temperature: z.number().default(agentOptionSettings.temperature.default),
});
export const tMessageSchema = z.object({
@ -228,6 +262,8 @@ export const tMessageSchema = z.object({
finish_reason: z.string().optional(),
/* assistant */
thread_id: z.string().optional(),
/* frontend components */
iconURL: z.string().optional(),
});
export type TMessage = z.input<typeof tMessageSchema> & {
@ -246,7 +282,7 @@ export const tConversationSchema = z.object({
endpointType: eModelEndpointSchema.optional(),
suggestions: z.array(z.string()).optional(),
messages: z.array(z.string()).optional(),
tools: z.array(tPluginSchema).optional(),
tools: z.union([z.array(tPluginSchema), z.array(z.string())]).optional(),
createdAt: z.string(),
updatedAt: z.string(),
systemMessage: z.string().nullable().optional(),
@ -284,6 +320,10 @@ export const tConversationSchema = z.object({
/** Used to overwrite active conversation settings when saving a Preset */
presetOverride: z.record(z.unknown()).optional(),
stop: z.array(z.string()).optional(),
/* frontend components */
iconURL: z.string().optional(),
greeting: z.string().optional(),
spec: z.string().optional(),
});
export const tPresetSchema = tConversationSchema
@ -329,6 +369,7 @@ export type TConversation = z.infer<typeof tConversationSchema> & {
export const openAISchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
chatGptLabel: true,
promptPrefix: true,
temperature: true,
@ -338,11 +379,14 @@ export const openAISchema = tConversationSchema
resendFiles: true,
imageDetail: true,
stop: true,
iconURL: true,
greeting: true,
spec: true,
})
.transform((obj) => ({
...obj,
model: obj.model ?? openAISettings.model.default,
chatGptLabel: obj.chatGptLabel ?? null,
chatGptLabel: obj.modelLabel ?? obj.chatGptLabel ?? null,
promptPrefix: obj.promptPrefix ?? null,
temperature: obj.temperature ?? openAISettings.temperature.default,
top_p: obj.top_p ?? openAISettings.top_p.default,
@ -352,6 +396,9 @@ export const openAISchema = tConversationSchema
typeof obj.resendFiles === 'boolean' ? obj.resendFiles : openAISettings.resendFiles.default,
imageDetail: obj.imageDetail ?? openAISettings.imageDetail.default,
stop: obj.stop ?? undefined,
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
}))
.catch(() => ({
model: openAISettings.model.default,
@ -364,6 +411,9 @@ export const openAISchema = tConversationSchema
resendFiles: openAISettings.resendFiles.default,
imageDetail: openAISettings.imageDetail.default,
stop: undefined,
iconURL: undefined,
greeting: undefined,
spec: undefined,
}));
export const googleSchema = tConversationSchema
@ -376,6 +426,9 @@ export const googleSchema = tConversationSchema
maxOutputTokens: true,
topP: true,
topK: true,
iconURL: true,
greeting: true,
spec: true,
})
.transform((obj) => {
const isGemini = obj?.model?.toLowerCase()?.includes('gemini');
@ -400,6 +453,9 @@ export const googleSchema = tConversationSchema
maxOutputTokens,
topP: obj.topP ?? google.topP.default,
topK: obj.topK ?? google.topK.default,
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
};
})
.catch(() => ({
@ -411,6 +467,9 @@ export const googleSchema = tConversationSchema
maxOutputTokens: google.maxOutputTokens.default,
topP: google.topP.default,
topK: google.topK.default,
iconURL: undefined,
greeting: undefined,
spec: undefined,
}));
export const bingAISchema = tConversationSchema
@ -458,6 +517,9 @@ export const anthropicSchema = tConversationSchema
topP: true,
topK: true,
resendFiles: true,
iconURL: true,
greeting: true,
spec: true,
})
.transform((obj) => ({
...obj,
@ -469,6 +531,9 @@ export const anthropicSchema = tConversationSchema
topP: obj.topP ?? 0.7,
topK: obj.topK ?? 5,
resendFiles: typeof obj.resendFiles === 'boolean' ? obj.resendFiles : true,
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
}))
.catch(() => ({
model: 'claude-1',
@ -479,6 +544,9 @@ export const anthropicSchema = tConversationSchema
topP: 0.7,
topK: 5,
resendFiles: true,
iconURL: undefined,
greeting: undefined,
spec: undefined,
}));
export const chatGPTBrowserSchema = tConversationSchema
@ -496,6 +564,7 @@ export const chatGPTBrowserSchema = tConversationSchema
export const gptPluginsSchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
chatGptLabel: true,
promptPrefix: true,
temperature: true,
@ -504,11 +573,14 @@ export const gptPluginsSchema = tConversationSchema
frequency_penalty: true,
tools: true,
agentOptions: true,
iconURL: true,
greeting: true,
spec: true,
})
.transform((obj) => ({
...obj,
model: obj.model ?? 'gpt-3.5-turbo',
chatGptLabel: obj.chatGptLabel ?? null,
chatGptLabel: obj.modelLabel ?? obj.chatGptLabel ?? null,
promptPrefix: obj.promptPrefix ?? null,
temperature: obj.temperature ?? 0.8,
top_p: obj.top_p ?? 1,
@ -516,11 +588,14 @@ export const gptPluginsSchema = tConversationSchema
frequency_penalty: obj.frequency_penalty ?? 0,
tools: obj.tools ?? [],
agentOptions: obj.agentOptions ?? {
agent: 'functions',
agent: EAgent.functions,
skipCompletion: true,
model: 'gpt-3.5-turbo',
temperature: 0,
},
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
}))
.catch(() => ({
model: 'gpt-3.5-turbo',
@ -532,11 +607,14 @@ export const gptPluginsSchema = tConversationSchema
frequency_penalty: 0,
tools: [],
agentOptions: {
agent: 'functions',
agent: EAgent.functions,
skipCompletion: true,
model: 'gpt-3.5-turbo',
temperature: 0,
},
iconURL: undefined,
greeting: undefined,
spec: undefined,
}));
export function removeNullishValues<T extends object>(obj: T): T {
@ -557,7 +635,41 @@ export const assistantSchema = tConversationSchema
assistant_id: true,
instructions: true,
promptPrefix: true,
iconURL: true,
greeting: true,
spec: true,
})
.transform((obj) => ({
...obj,
model: obj.model ?? openAISettings.model.default,
assistant_id: obj.assistant_id ?? undefined,
instructions: obj.instructions ?? undefined,
promptPrefix: obj.promptPrefix ?? null,
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
}))
.catch(() => ({
model: openAISettings.model.default,
assistant_id: undefined,
instructions: undefined,
promptPrefix: null,
iconURL: undefined,
greeting: undefined,
spec: undefined,
}));
export const compactAssistantSchema = tConversationSchema
.pick({
model: true,
assistant_id: true,
instructions: true,
promptPrefix: true,
iconURL: true,
greeting: true,
spec: true,
})
// will change after adding temperature
.transform(removeNullishValues)
.catch(() => ({}));
@ -573,6 +685,9 @@ export const compactOpenAISchema = tConversationSchema
resendFiles: true,
imageDetail: true,
stop: true,
iconURL: true,
greeting: true,
spec: true,
})
.transform((obj: Partial<TConversation>) => {
const newObj: Partial<TConversation> = { ...obj };
@ -609,6 +724,9 @@ export const compactGoogleSchema = tConversationSchema
maxOutputTokens: true,
topP: true,
topK: true,
iconURL: true,
greeting: true,
spec: true,
})
.transform((obj) => {
const newObj: Partial<TConversation> = { ...obj };
@ -639,6 +757,9 @@ export const compactAnthropicSchema = tConversationSchema
topP: true,
topK: true,
resendFiles: true,
iconURL: true,
greeting: true,
spec: true,
})
.transform((obj) => {
const newObj: Partial<TConversation> = { ...obj };
@ -683,6 +804,9 @@ export const compactPluginsSchema = tConversationSchema
frequency_penalty: true,
tools: true,
agentOptions: true,
iconURL: true,
greeting: true,
spec: true,
})
.transform((obj) => {
const newObj: Partial<TConversation> = { ...obj };
@ -710,7 +834,7 @@ export const compactPluginsSchema = tConversationSchema
if (
newObj.agentOptions &&
newObj.agentOptions.agent === 'functions' &&
newObj.agentOptions.agent === EAgent.functions &&
newObj.agentOptions.skipCompletion === true &&
newObj.agentOptions.model === 'gpt-3.5-turbo' &&
newObj.agentOptions.temperature === 0