🧠 feat: Prompt caching switch, prompt query params; refactor: static cache, prompt/markdown styling, trim copied code, switch new chat to convo URL (#3784)

* refactor: Update staticCache to use oneDayInSeconds for sMaxAge and maxAge

* refactor: role updates

* style: first pass cursor

* style: Update nested list styles in style.css

* feat: setIsSubmitting to true in message handler to prevent edge case where submitting turns false during message stream

* feat: Add logic to redirect to conversation page after creating a new conversation

* refactor: Trim code string before copying in CodeBlock component

* feat: configSchema bookmarks and presets defaults

* feat: Update loadDefaultInterface to handle undefined config

* refactor: use isEnabled for compression check

* feat: first pass, query params

* fix: styling issues for prompt cards

* feat: anthropic prompt caching UI switch

* chore: Update static file cache control defaults/comments in .env.example

* ci: fix tests

* ci: fix tests

* chore: use "submitting" class for server error connection suspense fallback
This commit is contained in:
Danny Avila 2024-08-26 15:34:46 -04:00 committed by GitHub
parent bd701c197e
commit 5694ad4e55
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
31 changed files with 519 additions and 112 deletions

View file

@ -431,10 +431,10 @@ ALLOW_SHARED_LINKS_PUBLIC=true
# Static File Cache Control #
#==============================#
# Leave commented out to use default of 1 month for max-age and 1 week for s-maxage
# Leave commented out to use defaults: 1 day (86400 seconds) for s-maxage and 2 days (172800 seconds) for max-age
# NODE_ENV must be set to production for these to take effect
# STATIC_CACHE_MAX_AGE=604800
# STATIC_CACHE_S_MAX_AGE=259200
# STATIC_CACHE_MAX_AGE=172800
# STATIC_CACHE_S_MAX_AGE=86400
# If you have another service in front of your LibreChat doing compression, disable express based compression here
# DISABLE_COMPRESSION=true

View file

@ -94,7 +94,8 @@ class AnthropicClient extends BaseClient {
const modelMatch = matchModelName(this.modelOptions.model, EModelEndpoint.anthropic);
this.isClaude3 = modelMatch.startsWith('claude-3');
this.isLegacyOutput = !modelMatch.startsWith('claude-3-5-sonnet');
this.supportsCacheControl = this.checkPromptCacheSupport(modelMatch);
this.supportsCacheControl =
this.options.promptCache && this.checkPromptCacheSupport(modelMatch);
if (
this.isLegacyOutput &&
@ -821,6 +822,7 @@ class AnthropicClient extends BaseClient {
maxContextTokens: this.options.maxContextTokens,
promptPrefix: this.options.promptPrefix,
modelLabel: this.options.modelLabel,
promptCache: this.options.promptCache,
resendFiles: this.options.resendFiles,
iconURL: this.options.iconURL,
greeting: this.options.greeting,

View file

@ -206,7 +206,7 @@ describe('AnthropicClient', () => {
const modelOptions = {
model: 'claude-3-5-sonnet-20240307',
};
client.setOptions({ modelOptions });
client.setOptions({ modelOptions, promptCache: true });
const anthropicClient = client.getClient(modelOptions);
expect(anthropicClient._options.defaultHeaders).toBeDefined();
expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
@ -220,7 +220,7 @@ describe('AnthropicClient', () => {
const modelOptions = {
model: 'claude-3-haiku-2028',
};
client.setOptions({ modelOptions });
client.setOptions({ modelOptions, promptCache: true });
const anthropicClient = client.getClient(modelOptions);
expect(anthropicClient._options.defaultHeaders).toBeDefined();
expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');

View file

@ -76,46 +76,57 @@ const permissionSchemas = {
};
/**
* Updates access permissions for a specific role and permission type.
* Updates access permissions for a specific role and multiple permission types.
* @param {SystemRoles} roleName - The role to update.
* @param {PermissionTypes} permissionType - The type of permission to update.
* @param {Object.<Permissions, boolean>} permissions - Permissions to update and their values.
* @param {Object.<PermissionTypes, Object.<Permissions, boolean>>} permissionsUpdate - Permissions to update and their values.
*/
async function updateAccessPermissions(roleName, permissionType, _permissions) {
const permissions = removeNullishValues(_permissions);
if (Object.keys(permissions).length === 0) {
async function updateAccessPermissions(roleName, permissionsUpdate) {
const updates = {};
for (const [permissionType, permissions] of Object.entries(permissionsUpdate)) {
if (permissionSchemas[permissionType]) {
updates[permissionType] = removeNullishValues(permissions);
}
}
if (Object.keys(updates).length === 0) {
return;
}
try {
const role = await getRoleByName(roleName);
if (!role || !permissionSchemas[permissionType]) {
if (!role) {
return;
}
await updateRoleByName(roleName, {
[permissionType]: {
...role[permissionType],
...permissionSchemas[permissionType].partial().parse(permissions),
},
});
const updatedPermissions = {};
let hasChanges = false;
Object.entries(permissions).forEach(([permission, value]) =>
for (const [permissionType, permissions] of Object.entries(updates)) {
const currentPermissions = role[permissionType] || {};
updatedPermissions[permissionType] = { ...currentPermissions };
for (const [permission, value] of Object.entries(permissions)) {
if (currentPermissions[permission] !== value) {
updatedPermissions[permissionType][permission] = value;
hasChanges = true;
logger.info(
`Updated '${roleName}' role ${permissionType} '${permission}' permission to: ${value}`,
),
`Updating '${roleName}' role ${permissionType} '${permission}' permission from ${currentPermissions[permission]} to: ${value}`,
);
}
}
}
if (hasChanges) {
await updateRoleByName(roleName, updatedPermissions);
logger.info(`Updated '${roleName}' role permissions`);
} else {
logger.info(`No changes needed for '${roleName}' role permissions`);
}
} catch (error) {
logger.error(`Failed to update ${roleName} role ${permissionType} permissions:`, error);
logger.error(`Failed to update ${roleName} role permissions:`, error);
}
}
const updatePromptsAccess = (roleName, permissions) =>
updateAccessPermissions(roleName, PermissionTypes.PROMPTS, permissions);
const updateBookmarksAccess = (roleName, permissions) =>
updateAccessPermissions(roleName, PermissionTypes.BOOKMARKS, permissions);
/**
* Initialize default roles in the system.
* Creates the default roles (ADMIN, USER) if they don't exist in the database.
@ -138,6 +149,5 @@ module.exports = {
getRoleByName,
initializeRoles,
updateRoleByName,
updatePromptsAccess,
updateBookmarksAccess,
updateAccessPermissions,
};

197
api/models/Role.spec.js Normal file
View file

@ -0,0 +1,197 @@
// Integration tests for updateAccessPermissions (~/models/Role) against an
// in-memory MongoDB instance, covering single, partial, and multi-type
// permission updates as well as no-op and missing-role cases.
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { SystemRoles, PermissionTypes } = require('librechat-data-provider');
const Role = require('~/models/schema/roleSchema');
const { updateAccessPermissions } = require('~/models/Role');
const getLogStores = require('~/cache/getLogStores');

// Mock the cache so Role model helpers never touch a real cache store.
jest.mock('~/cache/getLogStores', () => {
  return jest.fn().mockReturnValue({
    get: jest.fn(),
    set: jest.fn(),
    del: jest.fn(),
  });
});

let mongoServer;

// Start one in-memory MongoDB server for the whole suite.
beforeAll(async () => {
  mongoServer = await MongoMemoryServer.create();
  const mongoUri = mongoServer.getUri();
  await mongoose.connect(mongoUri);
});

// Tear down the connection and server after all tests complete.
afterAll(async () => {
  await mongoose.disconnect();
  await mongoServer.stop();
});

// Isolate tests: wipe roles and reset the cache mock between cases.
beforeEach(async () => {
  await Role.deleteMany({});
  getLogStores.mockClear();
});

describe('updateAccessPermissions', () => {
  // A changed permission value should be persisted.
  it('should update permissions when changes are needed', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      },
    }).save();
    await updateAccessPermissions(SystemRoles.USER, {
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: true,
      },
    });
    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: true,
      SHARED_GLOBAL: true,
    });
  });

  // Identical values should leave the stored role untouched.
  it('should not update permissions when no changes are needed', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      },
    }).save();
    await updateAccessPermissions(SystemRoles.USER, {
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      },
    });
    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: true,
      SHARED_GLOBAL: false,
    });
  });

  // Updating an unknown role should be a silent no-op (no role created).
  it('should handle non-existent roles', async () => {
    await updateAccessPermissions('NON_EXISTENT_ROLE', {
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
      },
    });
    const role = await Role.findOne({ name: 'NON_EXISTENT_ROLE' });
    expect(role).toBeNull();
  });

  // A partial permissions object should merge with, not replace, existing values.
  it('should update only specified permissions', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      },
    }).save();
    await updateAccessPermissions(SystemRoles.USER, {
      [PermissionTypes.PROMPTS]: {
        SHARED_GLOBAL: true,
      },
    });
    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: true,
      SHARED_GLOBAL: true,
    });
  });

  // Flipping a single flag should not disturb sibling flags.
  it('should handle partial updates', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      },
    }).save();
    await updateAccessPermissions(SystemRoles.USER, {
      [PermissionTypes.PROMPTS]: {
        USE: false,
      },
    });
    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: false,
      SHARED_GLOBAL: false,
    });
  });

  // Multiple permission types (PROMPTS + BOOKMARKS) can be updated in one call.
  it('should update multiple permission types at once', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      },
      [PermissionTypes.BOOKMARKS]: {
        USE: true,
      },
    }).save();
    await updateAccessPermissions(SystemRoles.USER, {
      [PermissionTypes.PROMPTS]: { USE: false, SHARED_GLOBAL: true },
      [PermissionTypes.BOOKMARKS]: { USE: false },
    });
    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: false,
      SHARED_GLOBAL: true,
    });
    expect(updatedRole[PermissionTypes.BOOKMARKS]).toEqual({
      USE: false,
    });
  });

  // A single-type update object still works through the multi-type API.
  it('should handle updates for a single permission type', async () => {
    await new Role({
      name: SystemRoles.USER,
      [PermissionTypes.PROMPTS]: {
        CREATE: true,
        USE: true,
        SHARED_GLOBAL: false,
      },
    }).save();
    await updateAccessPermissions(SystemRoles.USER, {
      [PermissionTypes.PROMPTS]: { USE: false, SHARED_GLOBAL: true },
    });
    const updatedRole = await Role.findOne({ name: SystemRoles.USER }).lean();
    expect(updatedRole[PermissionTypes.PROMPTS]).toEqual({
      CREATE: true,
      USE: false,
      SHARED_GLOBAL: true,
    });
  });
});

View file

@ -74,6 +74,10 @@ const conversationPreset = {
resendImages: {
type: Boolean,
},
/* Anthropic only */
promptCache: {
type: Boolean,
},
// files
resendFiles: {
type: Boolean,

View file

@ -16,9 +16,9 @@ const validateImageRequest = require('./middleware/validateImageRequest');
const errorController = require('./controllers/ErrorController');
const configureSocialLogins = require('./socialLogins');
const AppService = require('./services/AppService');
const staticCache = require('./utils/staticCache');
const noIndex = require('./middleware/noIndex');
const routes = require('./routes');
const staticCache = require('./utils/staticCache');
const { PORT, HOST, ALLOW_SOCIAL_LOGIN, DISABLE_COMPRESSION } = process.env ?? {};
@ -51,7 +51,7 @@ const startServer = async () => {
app.set('trust proxy', 1); /* trust first proxy */
app.use(cors());
if (DISABLE_COMPRESSION !== 'true') {
if (!isEnabled(DISABLE_COMPRESSION)) {
app.use(compression());
}

View file

@ -1,6 +1,6 @@
jest.mock('~/models/Role', () => ({
initializeRoles: jest.fn(),
updatePromptsAccess: jest.fn(),
updateAccessPermissions: jest.fn(),
getRoleByName: jest.fn(),
updateRoleByName: jest.fn(),
}));
@ -30,7 +30,7 @@ jest.mock('./start/checks', () => ({
const AppService = require('./AppService');
const { loadDefaultInterface } = require('./start/interface');
describe('AppService interface.prompts configuration', () => {
describe('AppService interface configuration', () => {
let app;
let mockLoadCustomConfig;
@ -41,33 +41,47 @@ describe('AppService interface.prompts configuration', () => {
mockLoadCustomConfig = require('./Config/loadCustomConfig');
});
it('should set prompts to true when loadDefaultInterface returns true', async () => {
it('should set prompts and bookmarks to true when loadDefaultInterface returns true for both', async () => {
mockLoadCustomConfig.mockResolvedValue({});
loadDefaultInterface.mockResolvedValue({ prompts: true });
loadDefaultInterface.mockResolvedValue({ prompts: true, bookmarks: true });
await AppService(app);
expect(app.locals.interfaceConfig.prompts).toBe(true);
expect(app.locals.interfaceConfig.bookmarks).toBe(true);
expect(loadDefaultInterface).toHaveBeenCalled();
});
it('should set prompts to false when loadDefaultInterface returns false', async () => {
mockLoadCustomConfig.mockResolvedValue({ interface: { prompts: false } });
loadDefaultInterface.mockResolvedValue({ prompts: false });
it('should set prompts and bookmarks to false when loadDefaultInterface returns false for both', async () => {
mockLoadCustomConfig.mockResolvedValue({ interface: { prompts: false, bookmarks: false } });
loadDefaultInterface.mockResolvedValue({ prompts: false, bookmarks: false });
await AppService(app);
expect(app.locals.interfaceConfig.prompts).toBe(false);
expect(app.locals.interfaceConfig.bookmarks).toBe(false);
expect(loadDefaultInterface).toHaveBeenCalled();
});
it('should not set prompts when loadDefaultInterface returns undefined', async () => {
it('should not set prompts and bookmarks when loadDefaultInterface returns undefined for both', async () => {
mockLoadCustomConfig.mockResolvedValue({});
loadDefaultInterface.mockResolvedValue({});
await AppService(app);
expect(app.locals.interfaceConfig.prompts).toBeUndefined();
expect(app.locals.interfaceConfig.bookmarks).toBeUndefined();
expect(loadDefaultInterface).toHaveBeenCalled();
});
it('should set prompts and bookmarks to different values when loadDefaultInterface returns different values', async () => {
mockLoadCustomConfig.mockResolvedValue({ interface: { prompts: true, bookmarks: false } });
loadDefaultInterface.mockResolvedValue({ prompts: true, bookmarks: false });
await AppService(app);
expect(app.locals.interfaceConfig.prompts).toBe(true);
expect(app.locals.interfaceConfig.bookmarks).toBe(false);
expect(loadDefaultInterface).toHaveBeenCalled();
});
});

View file

@ -23,8 +23,7 @@ jest.mock('./Files/Firebase/initialize', () => ({
}));
jest.mock('~/models/Role', () => ({
initializeRoles: jest.fn(),
updatePromptsAccess: jest.fn(),
updateBookmarksAccess: jest.fn(),
updateAccessPermissions: jest.fn(),
}));
jest.mock('./ToolService', () => ({
loadAndFormatTools: jest.fn().mockReturnValue({

View file

@ -6,6 +6,7 @@ const buildOptions = (endpoint, parsedBody) => {
promptPrefix,
maxContextTokens,
resendFiles = true,
promptCache = true,
iconURL,
greeting,
spec,
@ -17,6 +18,7 @@ const buildOptions = (endpoint, parsedBody) => {
modelLabel,
promptPrefix,
resendFiles,
promptCache,
iconURL,
greeting,
spec,

View file

@ -1,5 +1,10 @@
const { SystemRoles, Permissions, removeNullishValues } = require('librechat-data-provider');
const { updatePromptsAccess, updateBookmarksAccess } = require('~/models/Role');
const {
SystemRoles,
Permissions,
PermissionTypes,
removeNullishValues,
} = require('librechat-data-provider');
const { updateAccessPermissions } = require('~/models/Role');
const { logger } = require('~/config');
/**
@ -28,8 +33,10 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
prompts: interfaceConfig?.prompts ?? defaults.prompts,
});
await updatePromptsAccess(roleName, { [Permissions.USE]: loadedInterface.prompts });
await updateBookmarksAccess(roleName, { [Permissions.USE]: loadedInterface.bookmarks });
await updateAccessPermissions(roleName, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: loadedInterface.bookmarks },
});
let i = 0;
const logSettings = () => {

View file

@ -1,52 +1,81 @@
const { SystemRoles, Permissions } = require('librechat-data-provider');
const { updatePromptsAccess } = require('~/models/Role');
const { SystemRoles, Permissions, PermissionTypes } = require('librechat-data-provider');
const { updateAccessPermissions } = require('~/models/Role');
const { loadDefaultInterface } = require('./interface');
jest.mock('~/models/Role', () => ({
updatePromptsAccess: jest.fn(),
updateBookmarksAccess: jest.fn(),
updateAccessPermissions: jest.fn(),
}));
describe('loadDefaultInterface', () => {
it('should call updatePromptsAccess with the correct parameters when prompts is true', async () => {
const config = { interface: { prompts: true } };
it('should call updateAccessPermissions with the correct parameters when prompts and bookmarks are true', async () => {
const config = { interface: { prompts: true, bookmarks: true } };
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
expect(updatePromptsAccess).toHaveBeenCalledWith(SystemRoles.USER, { [Permissions.USE]: true });
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
});
});
it('should call updatePromptsAccess with false when prompts is false', async () => {
const config = { interface: { prompts: false } };
it('should call updateAccessPermissions with false when prompts and bookmarks are false', async () => {
const config = { interface: { prompts: false, bookmarks: false } };
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
expect(updatePromptsAccess).toHaveBeenCalledWith(SystemRoles.USER, {
[Permissions.USE]: false,
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: false },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
});
});
it('should call updatePromptsAccess with undefined when prompts is not specified in config', async () => {
it('should call updateAccessPermissions with undefined when prompts and bookmarks are not specified in config', async () => {
const config = {};
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
expect(updatePromptsAccess).toHaveBeenCalledWith(SystemRoles.USER, {
[Permissions.USE]: undefined,
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
});
});
it('should call updatePromptsAccess with undefined when prompts is explicitly undefined', async () => {
const config = { interface: { prompts: undefined } };
it('should call updateAccessPermissions with undefined when prompts and bookmarks are explicitly undefined', async () => {
const config = { interface: { prompts: undefined, bookmarks: undefined } };
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
expect(updatePromptsAccess).toHaveBeenCalledWith(SystemRoles.USER, {
[Permissions.USE]: undefined,
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
});
});
it('should call updateAccessPermissions with mixed values for prompts and bookmarks', async () => {
const config = { interface: { prompts: true, bookmarks: false } };
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
});
});
it('should call updateAccessPermissions with true when config is undefined', async () => {
const config = undefined;
const configDefaults = { interface: { prompts: true, bookmarks: true } };
await loadDefaultInterface(config, configDefaults);
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
});
});
});

View file

@ -1,9 +1,9 @@
const express = require('express');
const oneWeekInSeconds = 24 * 60 * 60 * 7;
const oneDayInSeconds = 24 * 60 * 60;
const sMaxAge = process.env.STATIC_CACHE_S_MAX_AGE || oneWeekInSeconds;
const maxAge = process.env.STATIC_CACHE_MAX_AGE || oneWeekInSeconds * 4;
const sMaxAge = process.env.STATIC_CACHE_S_MAX_AGE || oneDayInSeconds;
const maxAge = process.env.STATIC_CACHE_MAX_AGE || oneDayInSeconds * 2;
const staticCache = (staticPath) =>
express.static(staticPath, {

View file

@ -17,6 +17,7 @@ import {
useAutoSave,
useRequiresKey,
useHandleKeyUp,
useQueryParams,
useSubmitMessage,
} from '~/hooks';
import { TextareaAutosize } from '~/components/ui';
@ -37,6 +38,7 @@ import store from '~/store';
const ChatForm = ({ index = 0 }) => {
const submitButtonRef = useRef<HTMLButtonElement>(null);
const textAreaRef = useRef<HTMLTextAreaElement | null>(null);
useQueryParams({ textAreaRef });
const SpeechToText = useRecoilValue(store.speechToText);
const TextToSpeech = useRecoilValue(store.textToSpeech);
@ -61,7 +63,7 @@ const ChatForm = ({ index = 0 }) => {
const { handlePaste, handleKeyDown, handleCompositionStart, handleCompositionEnd } = useTextarea({
textAreaRef,
submitButtonRef,
disabled: !!requiresKey,
disabled: !!(requiresKey ?? false),
});
const {
@ -105,12 +107,12 @@ const ChatForm = ({ index = 0 }) => {
const invalidAssistant = useMemo(
() =>
isAssistantsEndpoint(conversation?.endpoint) &&
(!conversation?.assistant_id ||
!assistantMap[conversation.endpoint ?? ''][conversation.assistant_id ?? '']),
(!(conversation?.assistant_id ?? '') ||
!assistantMap?.[conversation?.endpoint ?? ''][conversation?.assistant_id ?? '']),
[conversation?.assistant_id, conversation?.endpoint, assistantMap],
);
const disableInputs = useMemo(
() => !!(requiresKey || invalidAssistant),
() => !!((requiresKey ?? false) || invalidAssistant),
[requiresKey, invalidAssistant],
);
@ -162,6 +164,8 @@ const ChatForm = ({ index = 0 }) => {
{endpoint && (
<TextareaAutosize
{...registerProps}
// TODO: remove autofocus due to a11y issues
// eslint-disable-next-line jsx-a11y/no-autofocus
autoFocus
ref={(e) => {
ref(e);

View file

@ -25,7 +25,7 @@ export const ErrorMessage = ({
<div className="text-message mb-[0.625rem] flex min-h-[20px] flex-col items-start gap-3 overflow-x-auto">
<div className="markdown prose dark:prose-invert light w-full break-words dark:text-gray-100">
<div className="absolute">
<p className="relative">
<p className="submitting relative">
<span className="result-thinking" />
</p>
</div>

View file

@ -29,6 +29,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
maxOutputTokens,
maxContextTokens,
resendFiles,
promptCache,
} = conversation ?? {};
const [setMaxContextTokens, maxContextTokensValue] = useDebouncedInput<number | null | undefined>(
{
@ -47,6 +48,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
const setTopP = setOption('topP');
const setTopK = setOption('topK');
const setResendFiles = setOption('resendFiles');
const setPromptCache = setOption('promptCache');
const setModel = (newModel: string) => {
const modelSetter = setOption('model');
@ -188,7 +190,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
className="flex h-4 w-full"
/>
</HoverCardTrigger>
<OptionHover endpoint={conversation?.endpoint ?? ''} type="temp" side={ESide.Left} />
<OptionHover endpoint={conversation.endpoint ?? ''} type="temp" side={ESide.Left} />
</HoverCard>
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
@ -228,7 +230,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
className="flex h-4 w-full"
/>
</HoverCardTrigger>
<OptionHover endpoint={conversation?.endpoint ?? ''} type="topp" side={ESide.Left} />
<OptionHover endpoint={conversation.endpoint ?? ''} type="topp" side={ESide.Left} />
</HoverCard>
<HoverCard openDelay={300}>
@ -269,7 +271,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
className="flex h-4 w-full"
/>
</HoverCardTrigger>
<OptionHover endpoint={conversation?.endpoint ?? ''} type="topk" side={ESide.Left} />
<OptionHover endpoint={conversation.endpoint ?? ''} type="topk" side={ESide.Left} />
</HoverCard>
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
@ -310,7 +312,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
/>
</HoverCardTrigger>
<OptionHover
endpoint={conversation?.endpoint ?? ''}
endpoint={conversation.endpoint ?? ''}
type="maxoutputtokens"
side={ESide.Left}
/>
@ -329,13 +331,34 @@ export default function Settings({ conversation, setOption, models, readonly }:
className="flex"
/>
<OptionHover
endpoint={conversation?.endpoint ?? ''}
endpoint={conversation.endpoint ?? ''}
type="resend"
side={ESide.Bottom}
/>
</div>
</HoverCardTrigger>
</HoverCard>
<HoverCard openDelay={500}>
<HoverCardTrigger className="grid w-full">
<div className="flex justify-between">
<Label htmlFor="prompt-cache" className="text-left text-sm font-medium">
{localize('com_endpoint_prompt_cache')}{' '}
</Label>
<Switch
id="prompt-cache"
checked={promptCache ?? true}
onCheckedChange={(checked: boolean) => setPromptCache(checked)}
disabled={readonly}
className="flex"
/>
<OptionHover
endpoint={conversation.endpoint ?? ''}
type="promptcache"
side={ESide.Bottom}
/>
</div>
</HoverCardTrigger>
</HoverCard>
</div>
</div>
);

View file

@ -26,6 +26,7 @@ const types = {
topk: 'com_endpoint_anthropic_topk',
maxoutputtokens: 'com_endpoint_anthropic_maxoutputtokens',
resend: openAI.resend,
promptcache: 'com_endpoint_anthropic_prompt_cache',
},
google: {
temp: 'com_endpoint_google_temp',
@ -44,7 +45,7 @@ const types = {
function OptionHover({ endpoint, type, side }: TOptionHoverProps) {
const localize = useLocalize();
const text = types?.[endpoint]?.[type];
const text = types[endpoint]?.[type];
if (!text) {
return null;
}

View file

@ -37,7 +37,7 @@ const CodeBar: React.FC<CodeBarProps> = React.memo(({ lang, codeRef, error, plug
const codeString = codeRef.current?.textContent;
if (codeString != null) {
setIsCopied(true);
copy(codeString, { format: 'text/plain' });
copy(codeString.trim(), { format: 'text/plain' });
setTimeout(() => {
setIsCopied(false);
@ -70,16 +70,17 @@ const CodeBlock: React.FC<CodeBlockProps> = ({
error,
}) => {
const codeRef = useRef<HTMLElement>(null);
const language = plugin || error ? 'json' : lang;
const isNonCode = !!(plugin === true || error === true);
const language = isNonCode ? 'json' : lang;
return (
<div className="w-full rounded-md bg-gray-900 text-xs text-white/80">
<CodeBar lang={lang} codeRef={codeRef} plugin={!!plugin} error={error} />
<CodeBar lang={lang} codeRef={codeRef} plugin={plugin === true} error={error} />
<div className={cn(classProp, 'overflow-y-auto p-4')}>
<code
ref={codeRef}
className={cn(
plugin || error ? '!whitespace-pre-wrap' : `hljs language-${language} !whitespace-pre`,
isNonCode ? '!whitespace-pre-wrap' : `hljs language-${language} !whitespace-pre`,
)}
>
{codeChildren}

View file

@ -61,7 +61,7 @@ export default function DashGroupItem({
};
const saveRename = () => {
updateGroup.mutate({ payload: { name: nameInputField }, id: group._id || '' });
updateGroup.mutate({ payload: { name: nameInputField }, id: group._id ?? '' });
};
const handleBlur = () => {
@ -77,13 +77,13 @@ export default function DashGroupItem({
}
};
const handleRename = (e: React.MouseEvent | React.KeyboardEvent) => {
const handleRename = (e: Event) => {
e.stopPropagation();
setNameEditFlag(true);
};
const handleDelete = () => {
deletePromptGroupMutation.mutate({ id: group._id || '' });
deletePromptGroupMutation.mutate({ id: group._id ?? '' });
};
return (
@ -156,7 +156,7 @@ export default function DashGroupItem({
</h3>
</div>
<div className="flex flex-row items-center gap-1">
{groupIsGlobal && (
{groupIsGlobal === true && (
<EarthIcon
className="icon-md text-green-400"
aria-label={localize('com_ui_global_group')}
@ -230,7 +230,7 @@ export default function DashGroupItem({
</div>
</div>
<div className="ellipsis text-balance text-sm text-gray-600 dark:text-gray-400">
{group.oneliner ? group.oneliner : group.productionPrompt?.prompt ?? ''}
{group.oneliner ?? '' ? group.oneliner : group.productionPrompt?.prompt ?? ''}
</div>
</>
)}

View file

@ -39,7 +39,7 @@ export default function List({
</div>
)}
<div className="flex-grow overflow-y-auto">
<div className="overflow-y-auto">
<div className="overflow-y-auto overflow-x-hidden">
{isLoading && isChatRoute && (
<Skeleton className="my-2 flex h-[84px] w-full rounded-2xl border-0 px-3 pb-4 pt-3" />
)}

View file

@ -28,7 +28,9 @@ export default function ListCard({
</div>
<div>{children}</div>
</div>
<div className="ellipsis select-none text-balance text-sm text-text-secondary">{snippet}</div>
<div className="ellipsis max-w-full select-none text-balance text-sm text-text-secondary">
{snippet}
</div>
</button>
);
}

View file

@ -3,6 +3,7 @@ export { default as useUserKey } from './useUserKey';
export { default as useDebounce } from './useDebounce';
export { default as useTextarea } from './useTextarea';
export { default as useCombobox } from './useCombobox';
export { default as useQueryParams } from './useQueryParams';
export { default as useHandleKeyUp } from './useHandleKeyUp';
export { default as useRequiresKey } from './useRequiresKey';
export { default as useMultipleKeys } from './useMultipleKeys';

View file

@ -0,0 +1,66 @@
import { useEffect, useRef } from 'react';
import { useSearchParams } from 'react-router-dom';
import { useChatFormContext } from '~/Providers';
/**
 * Seeds the chat textarea from a `prompt` URL query parameter.
 *
 * Polls every 100ms (up to 50 attempts / 5 seconds) until the textarea is
 * mounted; if the form is still empty, inserts the prompt text, focuses the
 * textarea with the caret at the end, and strips the consumed `prompt`
 * parameter from the address bar via `history.replaceState`.
 *
 * @param textAreaRef - Ref to the chat form's textarea element.
 */
export default function useQueryParams({
  textAreaRef,
}: {
  textAreaRef: React.RefObject<HTMLTextAreaElement>;
}) {
  const methods = useChatFormContext();
  const [searchParams] = useSearchParams();
  const attemptsRef = useRef(0);
  const processedRef = useRef(false);
  const maxAttempts = 50; // 5 seconds maximum (50 * 100ms)

  useEffect(() => {
    const promptParam = searchParams.get('prompt');
    if (!promptParam) {
      return;
    }

    // NOTE(review): `searchParams.get` already returns a percent-decoded value,
    // so this second decode will mangle prompts that legitimately contain
    // `%`-sequences (e.g. a literal "%20"). Kept as-is — TODO confirm intent.
    const decodedPrompt = decodeURIComponent(promptParam);

    const intervalId = setInterval(() => {
      // Stop polling once handled or when the retry budget is exhausted.
      if (processedRef.current || attemptsRef.current >= maxAttempts) {
        clearInterval(intervalId);
        if (attemptsRef.current >= maxAttempts) {
          console.warn('Max attempts reached, failed to process prompt');
        }
        return;
      }

      attemptsRef.current += 1;

      if (textAreaRef.current) {
        const currentText = methods.getValues('text');

        // Only seed the textarea when the user hasn't typed anything yet.
        if (!currentText) {
          methods.setValue('text', decodedPrompt, { shouldValidate: true });
          textAreaRef.current.focus();
          textAreaRef.current.setSelectionRange(decodedPrompt.length, decodedPrompt.length);

          // Remove the consumed 'prompt' parameter from the URL without a reload.
          searchParams.delete('prompt');
          const newUrl = `${window.location.pathname}${
            searchParams.toString() ? `?${searchParams.toString()}` : ''
          }`;
          window.history.replaceState({}, '', newUrl);
          processedRef.current = true;
        }

        // Textarea is mounted: either we just seeded it or it already holds
        // user text we must not overwrite — no further polling needed.
        clearInterval(intervalId);
      }
    }, 100); // Check every 100ms

    // Clean up the interval on unmount or when the params/form change.
    return () => {
      clearInterval(intervalId);
    };
  }, [searchParams, methods, textAreaRef]);
}

View file

@ -86,6 +86,7 @@ export default function useEventHandlers({
isRegenerate = false,
} = submission;
const text = data ?? '';
setIsSubmitting(true);
if (text.length > 0) {
announcePolite({
message: text,
@ -118,7 +119,7 @@ export default function useEventHandlers({
]);
}
},
[setMessages, announcePolite],
[setMessages, announcePolite, setIsSubmitting],
);
const cancelHandler = useCallback(
@ -387,6 +388,10 @@ export default function useEventHandlers({
}
if (setConversation && isAddedRequest !== true) {
if (window.location.pathname === '/c/new') {
window.history.pushState({}, '', '/c/' + conversation.conversationId);
}
setConversation((prevState) => {
const update = {
...prevState,

View file

@ -472,6 +472,9 @@ export default {
'Top-k changes how the model selects tokens for output. A top-k of 1 means the selected token is the most probable among all tokens in the model\'s vocabulary (also called greedy decoding), while a top-k of 3 means that the next token is selected from among the 3 most probable tokens (using temperature).',
com_endpoint_anthropic_maxoutputtokens:
'Maximum number of tokens that can be generated in the response. Specify a lower value for shorter responses and a higher value for longer responses. Note: models may stop before reaching this maximum.',
com_endpoint_anthropic_prompt_cache:
'Prompt caching allows reusing large context or instructions across API calls, reducing costs and latency',
com_endpoint_prompt_cache: 'Use Prompt Caching',
com_endpoint_anthropic_custom_name_placeholder: 'Set a custom name for Anthropic',
com_endpoint_frequency_penalty: 'Frequency Penalty',
com_endpoint_presence_penalty: 'Presence Penalty',

View file

@ -1776,9 +1776,7 @@ button.scroll-convo {
}
.result-streaming > :not(ol):not(ul):not(pre):last-child:after,
.result-streaming > pre:last-child code:after,
.result-streaming > ol:last-child > li:last-child:after,
.result-streaming > ul:last-child > li:last-child:after {
.result-streaming > pre:last-child code:after {
display: inline-block;
content: '⬤';
width: 12px;
@ -1792,9 +1790,7 @@ button.scroll-convo {
}
@supports (selector(:has(*))) {
.result-streaming > :not(ol):not(ul):last-child:after,
.result-streaming > ol:last-child > li:last-child:not(:has(ol)):not(:has(ul)):after,
.result-streaming > ul:last-child > li:last-child:not(:has(ol)):not(:has(ul)):after {
.result-streaming > :is(ul, ol):last-child > li:last-child:not(:has(> :is(ul, ol, pre))):after {
content: '⬤';
font-family: system-ui, Inter, Söhne Circle, -apple-system, Segoe UI, Roboto, Ubuntu, Cantarell,
Noto Sans, sans-serif;
@ -1807,8 +1803,8 @@ button.scroll-convo {
height: 12px;
}
}
@supports not (selector(:has(*))) {
.result-streaming > :not(ol):not(ul):last-child:after,
.result-streaming > ol:last-child > li:last-child:after,
.result-streaming > ul:last-child > li:last-child:after {
content: '⬤';
@ -1822,6 +1818,21 @@ button.scroll-convo {
width: 12px;
height: 12px;
}
.result-streaming > ol:last-child > li:last-child > :is(ul, ol, pre) ~ :after,
.result-streaming > ul:last-child > li:last-child > :is(ul, ol, pre) ~ :after {
display: none;
}
.result-streaming > ol:last-child > li:last-child > pre:last-child code:after,
.result-streaming > ul:last-child > li:last-child > pre:last-child code:after {
display: inline-block;
}
}
/* Remove cursors when streaming is complete */
.result-streaming:not(.submitting) :is(ul, ol) li:after {
display: none !important;
}
.webkit-dark-styles,
@ -1907,8 +1918,10 @@ button.scroll-convo {
}
/* Base styles for lists */
.prose ol, .prose ul,
.markdown ol, .markdown ul {
.prose ol,
.prose ul,
.markdown ol,
.markdown ul {
list-style-position: outside;
margin-top: 1em;
margin-bottom: 1em;
@ -1979,8 +1992,14 @@ button.scroll-convo {
}
/* Nested lists */
.prose ol ol, .prose ul ul, .prose ul ol, .prose ol ul,
.markdown ol ol, .markdown ul ul, .markdown ul ol, .markdown ol ul {
.prose ol ol,
.prose ul ul,
.prose ul ol,
.prose ol ul,
.markdown ol ol,
.markdown ul ul,
.markdown ul ol,
.markdown ol ul {
margin-top: 0.75em;
margin-bottom: 0.75em;
}

2
package-lock.json generated
View file

@ -31702,7 +31702,7 @@
},
"packages/data-provider": {
"name": "librechat-data-provider",
"version": "0.7.416",
"version": "0.7.417",
"license": "ISC",
"dependencies": {
"@types/js-yaml": "^4.0.9",

View file

@ -1,6 +1,6 @@
{
"name": "librechat-data-provider",
"version": "0.7.416",
"version": "0.7.417",
"description": "data services for librechat apps",
"main": "dist/index.js",
"module": "dist/index.es.js",

View file

@ -423,6 +423,8 @@ export const configSchema = z.object({
parameters: true,
sidePanel: true,
presets: true,
bookmarks: true,
prompts: true,
}),
fileStrategy: fileSourceSchema.default(FileSources.local),
registration: z

View file

@ -171,6 +171,9 @@ export const anthropicSettings = {
step: 0.01,
default: 1,
},
promptCache: {
default: true,
},
maxOutputTokens: {
min: 1,
max: ANTHROPIC_MAX_OUTPUT,
@ -393,6 +396,8 @@ export const tConversationSchema = z.object({
file_ids: z.array(z.string()).optional(),
maxContextTokens: coerceNumber.optional(),
max_tokens: coerceNumber.optional(),
/* Anthropic */
promptCache: z.boolean().optional(),
/* vision */
resendFiles: z.boolean().optional(),
imageDetail: eImageDetailSchema.optional(),
@ -648,6 +653,7 @@ export const anthropicSchema = tConversationSchema
topP: true,
topK: true,
resendFiles: true,
promptCache: true,
iconURL: true,
greeting: true,
spec: true,
@ -664,6 +670,10 @@ export const anthropicSchema = tConversationSchema
maxOutputTokens: obj.maxOutputTokens ?? anthropicSettings.maxOutputTokens.reset(model),
topP: obj.topP ?? anthropicSettings.topP.default,
topK: obj.topK ?? anthropicSettings.topK.default,
promptCache:
typeof obj.promptCache === 'boolean'
? obj.promptCache
: anthropicSettings.promptCache.default,
resendFiles:
typeof obj.resendFiles === 'boolean'
? obj.resendFiles
@ -683,6 +693,7 @@ export const anthropicSchema = tConversationSchema
topP: anthropicSettings.topP.default,
topK: anthropicSettings.topK.default,
resendFiles: anthropicSettings.resendFiles.default,
promptCache: anthropicSettings.promptCache.default,
iconURL: undefined,
greeting: undefined,
spec: undefined,
@ -911,6 +922,7 @@ export const compactAnthropicSchema = tConversationSchema
topP: true,
topK: true,
resendFiles: true,
promptCache: true,
iconURL: true,
greeting: true,
spec: true,
@ -933,6 +945,9 @@ export const compactAnthropicSchema = tConversationSchema
if (newObj.resendFiles === anthropicSettings.resendFiles.default) {
delete newObj.resendFiles;
}
if (newObj.promptCache === anthropicSettings.promptCache.default) {
delete newObj.promptCache;
}
return removeNullishValues(newObj);
})

View file

@ -26,6 +26,7 @@ export type TEndpointOption = {
endpointType?: EModelEndpoint;
modelDisplayLabel?: string;
resendFiles?: boolean;
promptCache?: boolean;
maxContextTokens?: number;
imageDetail?: ImageDetail;
model?: string | null;