🧠 feat: Prompt caching switch, prompt query params; refactor: static cache, prompt/markdown styling, trim copied code, switch new chat to convo URL (#3784)

* refactor: Update staticCache to use oneDayInSeconds for sMaxAge and maxAge

* refactor: role updates

* style: first pass cursor

* style: Update nested list styles in style.css

* feat: set `isSubmitting` to true in message handler to prevent an edge case where submitting turns false during the message stream

* feat: Add logic to redirect to conversation page after creating a new conversation

* refactor: Trim code string before copying in CodeBlock component

* feat: configSchema bookmarks and presets defaults

* feat: Update loadDefaultInterface to handle undefined config

* refactor: use a dedicated check for compression

* feat: first pass, query params

* fix: styling issues for prompt cards

* feat: anthropic prompt caching UI switch

* chore: Update static file cache control defaults/comments in .env.example

* ci: fix tests

* ci: fix tests

* chore: use "submitting" class for server error connection suspense fallback
This commit is contained in:
Danny Avila 2024-08-26 15:34:46 -04:00 committed by GitHub
parent bd701c197e
commit 5694ad4e55
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
31 changed files with 519 additions and 112 deletions

View file

@@ -1,6 +1,6 @@
{
"name": "librechat-data-provider",
"version": "0.7.416",
"version": "0.7.417",
"description": "data services for librechat apps",
"main": "dist/index.js",
"module": "dist/index.es.js",

View file

@@ -423,6 +423,8 @@ export const configSchema = z.object({
parameters: true,
sidePanel: true,
presets: true,
bookmarks: true,
prompts: true,
}),
fileStrategy: fileSourceSchema.default(FileSources.local),
registration: z

View file

@@ -171,6 +171,9 @@ export const anthropicSettings = {
step: 0.01,
default: 1,
},
promptCache: {
default: true,
},
maxOutputTokens: {
min: 1,
max: ANTHROPIC_MAX_OUTPUT,
@@ -393,6 +396,8 @@ export const tConversationSchema = z.object({
file_ids: z.array(z.string()).optional(),
maxContextTokens: coerceNumber.optional(),
max_tokens: coerceNumber.optional(),
/* Anthropic */
promptCache: z.boolean().optional(),
/* vision */
resendFiles: z.boolean().optional(),
imageDetail: eImageDetailSchema.optional(),
@@ -648,6 +653,7 @@ export const anthropicSchema = tConversationSchema
topP: true,
topK: true,
resendFiles: true,
promptCache: true,
iconURL: true,
greeting: true,
spec: true,
@@ -664,6 +670,10 @@ export const anthropicSchema = tConversationSchema
maxOutputTokens: obj.maxOutputTokens ?? anthropicSettings.maxOutputTokens.reset(model),
topP: obj.topP ?? anthropicSettings.topP.default,
topK: obj.topK ?? anthropicSettings.topK.default,
promptCache:
typeof obj.promptCache === 'boolean'
? obj.promptCache
: anthropicSettings.promptCache.default,
resendFiles:
typeof obj.resendFiles === 'boolean'
? obj.resendFiles
@@ -683,6 +693,7 @@ export const anthropicSchema = tConversationSchema
topP: anthropicSettings.topP.default,
topK: anthropicSettings.topK.default,
resendFiles: anthropicSettings.resendFiles.default,
promptCache: anthropicSettings.promptCache.default,
iconURL: undefined,
greeting: undefined,
spec: undefined,
@@ -911,6 +922,7 @@ export const compactAnthropicSchema = tConversationSchema
topP: true,
topK: true,
resendFiles: true,
promptCache: true,
iconURL: true,
greeting: true,
spec: true,
@@ -933,6 +945,9 @@ export const compactAnthropicSchema = tConversationSchema
if (newObj.resendFiles === anthropicSettings.resendFiles.default) {
delete newObj.resendFiles;
}
if (newObj.promptCache === anthropicSettings.promptCache.default) {
delete newObj.promptCache;
}
return removeNullishValues(newObj);
})

View file

@@ -26,6 +26,7 @@ export type TEndpointOption = {
endpointType?: EModelEndpoint;
modelDisplayLabel?: string;
resendFiles?: boolean;
promptCache?: boolean;
maxContextTokens?: number;
imageDetail?: ImageDetail;
model?: string | null;