Merge branch 'main' into feat/E2EE

Ruben Talstra 2025-03-05 10:50:49 +01:00 committed by GitHub
commit 40e59bc55c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
206 changed files with 14792 additions and 3465 deletions


@ -22,8 +22,8 @@ export type ParametersSchema = {
export type OpenAPISchema = OpenAPIV3.SchemaObject &
ParametersSchema & {
items?: OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject;
};
items?: OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject;
};
export type ApiKeyCredentials = {
api_key: string;
@ -43,8 +43,8 @@ export type Credentials = ApiKeyCredentials | OAuthCredentials;
type MediaTypeObject =
| undefined
| {
[media: string]: OpenAPIV3.MediaTypeObject | undefined;
};
[media: string]: OpenAPIV3.MediaTypeObject | undefined;
};
type RequestBodyObject = Omit<OpenAPIV3.RequestBodyObject, 'content'> & {
content: MediaTypeObject;
@ -358,19 +358,29 @@ export class ActionRequest {
}
}
export function resolveRef(
schema: OpenAPIV3.SchemaObject | OpenAPIV3.ReferenceObject | RequestBodyObject,
components?: OpenAPIV3.ComponentsObject,
): OpenAPIV3.SchemaObject {
if ('$ref' in schema && components) {
const refPath = schema.$ref.replace(/^#\/components\/schemas\//, '');
const resolvedSchema = components.schemas?.[refPath];
if (!resolvedSchema) {
throw new Error(`Reference ${schema.$ref} not found`);
export function resolveRef<
T extends
| OpenAPIV3.ReferenceObject
| OpenAPIV3.SchemaObject
| OpenAPIV3.ParameterObject
| OpenAPIV3.RequestBodyObject,
>(obj: T, components?: OpenAPIV3.ComponentsObject): Exclude<T, OpenAPIV3.ReferenceObject> {
if ('$ref' in obj && components) {
const refPath = obj.$ref.replace(/^#\/components\//, '').split('/');
let resolved: unknown = components as Record<string, unknown>;
for (const segment of refPath) {
if (typeof resolved === 'object' && resolved !== null && segment in resolved) {
resolved = (resolved as Record<string, unknown>)[segment];
} else {
throw new Error(`Could not resolve reference: ${obj.$ref}`);
}
}
return resolveRef(resolvedSchema, components);
return resolveRef(resolved as typeof obj, components) as Exclude<T, OpenAPIV3.ReferenceObject>;
}
return schema as OpenAPIV3.SchemaObject;
return obj as Exclude<T, OpenAPIV3.ReferenceObject>;
}
function sanitizeOperationId(input: string) {
@ -399,7 +409,7 @@ export function openapiToFunction(
const operationObj = operation as OpenAPIV3.OperationObject & {
'x-openai-isConsequential'?: boolean;
} & {
'x-strict'?: boolean
'x-strict'?: boolean;
};
// Operation ID is used as the function name
@ -415,15 +425,25 @@ export function openapiToFunction(
};
if (operationObj.parameters) {
for (const param of operationObj.parameters) {
const paramObj = param as OpenAPIV3.ParameterObject;
const resolvedSchema = resolveRef(
{ ...paramObj.schema } as OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject,
for (const param of operationObj.parameters ?? []) {
const resolvedParam = resolveRef(
param,
openapiSpec.components,
);
parametersSchema.properties[paramObj.name] = resolvedSchema;
if (paramObj.required === true) {
parametersSchema.required.push(paramObj.name);
) as OpenAPIV3.ParameterObject;
const paramName = resolvedParam.name;
if (!paramName || !resolvedParam.schema) {
continue;
}
const paramSchema = resolveRef(
resolvedParam.schema,
openapiSpec.components,
) as OpenAPIV3.SchemaObject;
parametersSchema.properties[paramName] = paramSchema;
if (resolvedParam.required) {
parametersSchema.required.push(paramName);
}
}
}
@ -446,7 +466,12 @@ export function openapiToFunction(
}
}
const functionSignature = new FunctionSignature(operationId, description, parametersSchema, isStrict);
const functionSignature = new FunctionSignature(
operationId,
description,
parametersSchema,
isStrict,
);
functionSignatures.push(functionSignature);
const actionRequest = new ActionRequest(
@ -544,4 +569,4 @@ export function validateAndParseOpenAPISpec(specString: string): ValidationResul
console.error(error);
return { status: false, message: 'Error parsing OpenAPI spec.' };
}
}
}
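
A minimal usage sketch of the now-generic resolveRef (the Pet schema, components object, and import path are illustrative, not part of this diff; the package's re-export of the actions module is shown further below):

import type { OpenAPIV3 } from 'openapi-types';
import { resolveRef } from 'librechat-data-provider';

const components: OpenAPIV3.ComponentsObject = {
  schemas: {
    Pet: { type: 'object', properties: { name: { type: 'string' } } },
  },
};

// A reference or an inline schema can be passed; the generic return type
// excludes ReferenceObject, so no manual cast is needed at the call site.
const ref: OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject = {
  $ref: '#/components/schemas/Pet',
};
const petSchema = resolveRef(ref, components); // typed as OpenAPIV3.SchemaObject
console.log(petSchema.properties?.name); // { type: 'string' }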


@ -238,4 +238,14 @@ export const userTerms = () => '/api/user/terms';
export const acceptUserTerms = () => '/api/user/terms/accept';
export const banner = () => '/api/banner';
export const encryption = () => '/api/user/encryption';
// Two-Factor Endpoints
export const enableTwoFactor = () => '/api/auth/2fa/enable';
export const verifyTwoFactor = () => '/api/auth/2fa/verify';
export const confirmTwoFactor = () => '/api/auth/2fa/confirm';
export const disableTwoFactor = () => '/api/auth/2fa/disable';
export const regenerateBackupCodes = () => '/api/auth/2fa/backup/regenerate';
export const verifyTwoFactorTemp = () => '/api/auth/2fa/verify-temp';


@ -6,8 +6,9 @@ import type {
TValidatedAzureConfig,
TAzureConfigValidationResult,
} from '../src/config';
import { errorsToString, extractEnvVariable, envVarRegex } from '../src/parsers';
import { extractEnvVariable, envVarRegex } from '../src/utils';
import { azureGroupConfigsSchema } from '../src/config';
import { errorsToString } from '../src/parsers';
export const deprecatedAzureVariables = [
/* "related to" precedes description text */


@ -1,6 +1,20 @@
import { z } from 'zod';
import * as s from './schemas';
type ThinkingConfig = {
type: 'enabled';
budget_tokens: number;
};
type AnthropicReasoning = {
thinking?: ThinkingConfig | boolean;
thinkingBudget?: number;
};
type AnthropicInput = BedrockConverseInput & {
additionalModelRequestFields: BedrockConverseInput['additionalModelRequestFields'] &
AnthropicReasoning;
};
export const bedrockInputSchema = s.tConversationSchema
.pick({
/* LibreChat params; optionType: 'conversation' */
@ -21,11 +35,24 @@ export const bedrockInputSchema = s.tConversationSchema
temperature: true,
topP: true,
stop: true,
thinking: true,
thinkingBudget: true,
/* Catch-all fields */
topK: true,
additionalModelRequestFields: true,
})
.transform((obj) => s.removeNullishValues(obj))
.transform((obj) => {
if ((obj as AnthropicInput).additionalModelRequestFields?.thinking != null) {
const _obj = obj as AnthropicInput;
obj.thinking = !!_obj.additionalModelRequestFields.thinking;
obj.thinkingBudget =
typeof _obj.additionalModelRequestFields.thinking === 'object'
? (_obj.additionalModelRequestFields.thinking as ThinkingConfig)?.budget_tokens
: undefined;
delete obj.additionalModelRequestFields;
}
return s.removeNullishValues(obj);
})
.catch(() => ({}));
export type BedrockConverseInput = z.infer<typeof bedrockInputSchema>;
@ -49,6 +76,8 @@ export const bedrockInputParser = s.tConversationSchema
temperature: true,
topP: true,
stop: true,
thinking: true,
thinkingBudget: true,
/* Catch-all fields */
topK: true,
additionalModelRequestFields: true,
@ -87,6 +116,27 @@ export const bedrockInputParser = s.tConversationSchema
}
});
/** Default thinking and thinkingBudget for 'anthropic.claude-3-7-sonnet' models, if not defined */
if (
typeof typedData.model === 'string' &&
typedData.model.includes('anthropic.claude-3-7-sonnet')
) {
if (additionalFields.thinking === undefined) {
additionalFields.thinking = true;
} else if (additionalFields.thinking === false) {
delete additionalFields.thinking;
delete additionalFields.thinkingBudget;
}
if (additionalFields.thinking === true && additionalFields.thinkingBudget === undefined) {
additionalFields.thinkingBudget = 2000;
}
additionalFields.anthropic_beta = ['output-128k-2025-02-19'];
} else if (additionalFields.thinking != null || additionalFields.thinkingBudget != null) {
delete additionalFields.thinking;
delete additionalFields.thinkingBudget;
}
if (Object.keys(additionalFields).length > 0) {
typedData.additionalModelRequestFields = {
...((typedData.additionalModelRequestFields as Record<string, unknown> | undefined) || {}),
@ -104,9 +154,34 @@ export const bedrockInputParser = s.tConversationSchema
})
.catch(() => ({}));
/**
* Configures the "thinking" parameter based on given input and thinking options.
*
* @param data - The parsed Bedrock request options object
* @returns The object with thinking configured appropriately
*/
function configureThinking(data: AnthropicInput): AnthropicInput {
const updatedData = { ...data };
if (updatedData.additionalModelRequestFields?.thinking === true) {
updatedData.maxTokens = updatedData.maxTokens ?? updatedData.maxOutputTokens ?? 8192;
delete updatedData.maxOutputTokens;
const thinkingConfig: AnthropicReasoning['thinking'] = {
type: 'enabled',
budget_tokens: updatedData.additionalModelRequestFields.thinkingBudget ?? 2000,
};
if (thinkingConfig.budget_tokens > updatedData.maxTokens) {
thinkingConfig.budget_tokens = Math.floor(updatedData.maxTokens * 0.9);
}
updatedData.additionalModelRequestFields.thinking = thinkingConfig;
delete updatedData.additionalModelRequestFields.thinkingBudget;
}
return updatedData;
}
export const bedrockOutputParser = (data: Record<string, unknown>) => {
const knownKeys = [...Object.keys(s.tConversationSchema.shape), 'topK', 'top_k'];
const result: Record<string, unknown> = {};
let result: Record<string, unknown> = {};
// Extract known fields from the root level
Object.entries(data).forEach(([key, value]) => {
@ -125,6 +200,8 @@ export const bedrockOutputParser = (data: Record<string, unknown>) => {
if (knownKeys.includes(key)) {
if (key === 'top_k') {
result['topK'] = value;
} else if (key === 'thinking' || key === 'thinkingBudget') {
return;
} else {
result[key] = value;
}
@ -140,8 +217,11 @@ export const bedrockOutputParser = (data: Record<string, unknown>) => {
result.maxTokens = result.maxOutputTokens;
}
// Remove additionalModelRequestFields from the result
delete result.additionalModelRequestFields;
result = configureThinking(result as AnthropicInput);
// Remove additionalModelRequestFields from the result if it doesn't contain a thinking config
if ((result as AnthropicInput).additionalModelRequestFields?.thinking == null) {
delete result.additionalModelRequestFields;
}
return result;
};
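
A rough sketch of what bedrockOutputParser now produces for a conversation saved with Anthropic thinking fields (values are illustrative; only the behavior shown in this diff is assumed):

// Illustrative stored options for a Claude 3.7 Sonnet Bedrock conversation.
const saved = {
  model: 'anthropic.claude-3-7-sonnet-20250219-v1:0',
  maxOutputTokens: 1500,
  additionalModelRequestFields: { thinking: true, thinkingBudget: 2000 },
};

const parsed = bedrockOutputParser(saved);
// maxOutputTokens is promoted to maxTokens (1500); since the requested budget (2000)
// exceeds maxTokens, configureThinking caps it at 90% of maxTokens:
// parsed.additionalModelRequestFields.thinking -> { type: 'enabled', budget_tokens: 1350 }
// additionalModelRequestFields is kept because it now carries the thinking config.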


@ -2,8 +2,8 @@ import { z } from 'zod';
import type { ZodError } from 'zod';
import type { TModelsConfig } from './types';
import { EModelEndpoint, eModelEndpointSchema } from './schemas';
import { fileConfigSchema } from './file-config';
import { specsConfigSchema, TSpecsConfig } from './models';
import { fileConfigSchema } from './file-config';
import { FileSources } from './types/files';
import { MCPServersSchema } from './mcp';
@ -15,6 +15,7 @@ export const defaultRetrievalModels = [
'o1-preview',
'o1-mini-2024-09-12',
'o1-mini',
'o3-mini',
'chatgpt-4o-latest',
'gpt-4o-2024-05-13',
'gpt-4o-2024-08-06',
@ -31,6 +32,27 @@ export const defaultRetrievalModels = [
'gpt-4-1106',
];
export const excludedKeys = new Set([
'conversationId',
'title',
'iconURL',
'greeting',
'endpoint',
'endpointType',
'createdAt',
'updatedAt',
'expiredAt',
'messages',
'isArchived',
'tags',
'user',
'__v',
'_id',
'tools',
'model',
'files',
]);
export enum SettingsViews {
default = 'default',
advanced = 'advanced',
@ -446,6 +468,7 @@ export const intefaceSchema = z
})
.optional(),
termsOfService: termsOfServiceSchema.optional(),
customWelcome: z.string().optional(),
endpointsMenu: z.boolean().optional(),
modelSelect: z.boolean().optional(),
parameters: z.boolean().optional(),
@ -456,6 +479,7 @@ export const intefaceSchema = z
prompts: z.boolean().optional(),
agents: z.boolean().optional(),
temporaryChat: z.boolean().optional(),
runCode: z.boolean().optional(),
})
.default({
endpointsMenu: true,
@ -468,6 +492,7 @@ export const intefaceSchema = z
prompts: true,
agents: true,
temporaryChat: true,
runCode: true,
});
export type TInterfaceConfig = z.infer<typeof intefaceSchema>;
@ -627,6 +652,8 @@ export const alternateName = {
const sharedOpenAIModels = [
'gpt-4o-mini',
'gpt-4o',
'gpt-4.5-preview',
'gpt-4.5-preview-2025-02-27',
'gpt-3.5-turbo',
'gpt-3.5-turbo-0125',
'gpt-4-turbo',
@ -645,6 +672,8 @@ const sharedOpenAIModels = [
];
const sharedAnthropicModels = [
'claude-3-7-sonnet-latest',
'claude-3-7-sonnet-20250219',
'claude-3-5-haiku-20241022',
'claude-3-5-sonnet-20241022',
'claude-3-5-sonnet-20240620',
@ -697,14 +726,14 @@ export const bedrockModels = [
export const defaultModels = {
[EModelEndpoint.azureAssistants]: sharedOpenAIModels,
[EModelEndpoint.assistants]: ['chatgpt-4o-latest', ...sharedOpenAIModels],
[EModelEndpoint.assistants]: [...sharedOpenAIModels, 'chatgpt-4o-latest'],
[EModelEndpoint.agents]: sharedOpenAIModels, // TODO: Add agent models (agentsModels)
[EModelEndpoint.google]: [
// Shared Google Models between Vertex AI & Gen AI
// Gemini 2.0 Models
'gemini-2.0-flash-001',
'gemini-2.0-flash-exp',
'gemini-2.0-flash-lite-preview-02-05',
'gemini-2.0-flash-lite',
'gemini-2.0-pro-exp-02-05',
// Gemini 1.5 Models
'gemini-1.5-flash-001',
@ -716,8 +745,8 @@ export const defaultModels = {
],
[EModelEndpoint.anthropic]: sharedAnthropicModels,
[EModelEndpoint.openAI]: [
'chatgpt-4o-latest',
...sharedOpenAIModels,
'chatgpt-4o-latest',
'gpt-4-vision-preview',
'gpt-3.5-turbo-instruct-0914',
'gpt-3.5-turbo-instruct',
@ -782,6 +811,7 @@ export const supportsBalanceCheck = {
};
export const visionModels = [
'gpt-4.5',
'gpt-4o',
'gpt-4o-mini',
'o1',
@ -830,7 +860,7 @@ export function validateVisionModel({
return visionModels.concat(additionalModels).some((visionModel) => model.includes(visionModel));
}
export const imageGenTools = new Set(['dalle', 'dall-e', 'stable-diffusion']);
export const imageGenTools = new Set(['dalle', 'dall-e', 'stable-diffusion', 'flux']);
/**
* Enum for collections using infinite queries


@ -775,8 +775,40 @@ export function getBanner(): Promise<t.TBannerResponse> {
return request.get(endpoints.banner());
}
export const updateUserEncryption = (
payload: t.UpdateUserEncryptionRequest,
): Promise<t.UpdateUserEncryptionResponse> => {
return request.put(endpoints.encryption(), payload);
};
};
export function enableTwoFactor(): Promise<t.TEnable2FAResponse> {
return request.get(endpoints.enableTwoFactor());
}
export function verifyTwoFactor(
payload: t.TVerify2FARequest,
): Promise<t.TVerify2FAResponse> {
return request.post(endpoints.verifyTwoFactor(), payload);
}
export function confirmTwoFactor(
payload: t.TVerify2FARequest,
): Promise<t.TVerify2FAResponse> {
return request.post(endpoints.confirmTwoFactor(), payload);
}
export function disableTwoFactor(): Promise<t.TDisable2FAResponse> {
return request.post(endpoints.disableTwoFactor());
}
export function regenerateBackupCodes(): Promise<t.TRegenerateBackupCodesResponse> {
return request.post(endpoints.regenerateBackupCodes());
}
export function verifyTwoFactorTemp(
payload: t.TVerify2FATempRequest,
): Promise<t.TVerify2FATempResponse> {
return request.post(endpoints.verifyTwoFactorTemp(), payload);
}
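
A sketch of how these calls might compose into enrollment and a 2FA login step (wiring, variable names, and import paths are illustrative, not part of this diff):

import { dataService } from 'librechat-data-provider';
import type { TLoginResponse } from 'librechat-data-provider';

// Enrollment: fetch the otpauth URL (rendered as a QR code) plus backup codes, then confirm.
async function enrollTwoFactor(otp: string) {
  const { otpauthUrl, backupCodes } = await dataService.enableTwoFactor();
  // ...display otpauthUrl as a QR code and store backupCodes for the user...
  await dataService.confirmTwoFactor({ token: otp });
  return { otpauthUrl, backupCodes };
}

// Login: when the password step responds with twoFAPending, finish with the temp token.
async function completeLogin(login: TLoginResponse, otp: string) {
  if (login.twoFAPending === true && login.tempToken != null) {
    return dataService.verifyTwoFactorTemp({ tempToken: login.tempToken, token: otp });
  }
  return login;
}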


@ -31,5 +31,6 @@ export { default as request } from './request';
export { dataService };
import * as dataService from './data-service';
/* general helpers */
export * from './utils';
export * from './actions';
export { default as createPayload } from './createPayload';


@ -68,4 +68,6 @@ export enum MutationKeys {
deleteUser = 'deleteUser',
updateRole = 'updateRole',
updateUserEncryption = 'updateUserEncryption',
enableTwoFactor = 'enableTwoFactor',
verifyTwoFactor = 'verifyTwoFactor',
}


@ -1,4 +1,5 @@
import { z } from 'zod';
import { extractEnvVariable } from './utils';
const BaseOptionsSchema = z.object({
iconPath: z.string().optional(),
@ -18,8 +19,22 @@ export const StdioOptionsSchema = BaseOptionsSchema.extend({
* The environment to use when spawning the process.
*
* If not specified, the result of getDefaultEnvironment() will be used.
* Environment variables can be referenced using ${VAR_NAME} syntax.
*/
env: z.record(z.string(), z.string()).optional(),
env: z
.record(z.string(), z.string())
.optional()
.transform((env) => {
if (!env) {
return env;
}
const processedEnv: Record<string, string> = {};
for (const [key, value] of Object.entries(env)) {
processedEnv[key] = extractEnvVariable(value);
}
return processedEnv;
}),
/**
* How to handle stderr of the child process. This matches the semantics of Node's `child_process.spawn`.
*
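
A small sketch of the substitution this transform performs, assuming command and args are the schema's other stdio fields and that GITHUB_TOKEN is set in the server's environment (server and values made up):

process.env.GITHUB_TOKEN = 'ghp_example123';

const parsed = StdioOptionsSchema.parse({
  command: 'npx',
  args: ['-y', '@modelcontextprotocol/server-github'],
  env: {
    GITHUB_PERSONAL_ACCESS_TOKEN: '${GITHUB_TOKEN}',
    NODE_ENV: 'production',
  },
});

// parsed.env.GITHUB_PERSONAL_ACCESS_TOKEN === 'ghp_example123'
// parsed.env.NODE_ENV === 'production' (values without ${...} pass through unchanged)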


@ -19,6 +19,7 @@ import {
compactAssistantSchema,
} from './schemas';
import { bedrockInputSchema } from './bedrock';
import { extractEnvVariable } from './utils';
import { alternateName } from './config';
type EndpointSchema =
@ -122,18 +123,6 @@ export function errorsToString(errors: ZodIssue[]) {
.join(' ');
}
export const envVarRegex = /^\${(.+)}$/;
/** Extracts the value of an environment variable from a string. */
export function extractEnvVariable(value: string) {
const envVarMatch = value.match(envVarRegex);
if (envVarMatch) {
// eslint-disable-next-line @typescript-eslint/strict-boolean-expressions
return process.env[envVarMatch[1]] || value;
}
return value;
}
/** Resolves header values to env variables if detected */
export function resolveHeaders(headers: Record<string, string> | undefined) {
const resolvedHeaders = { ...(headers ?? {}) };
@ -211,6 +200,29 @@ export const parseConvo = ({
return convo;
};
/** Match GPT followed by digit, optional decimal, and optional suffix
*
* Examples: gpt-4, gpt-4o, gpt-4.5, gpt-5a, etc. */
const extractGPTVersion = (modelStr: string): string => {
const gptMatch = modelStr.match(/gpt-(\d+(?:\.\d+)?)([a-z])?/i);
if (gptMatch) {
const version = gptMatch[1];
const suffix = gptMatch[2] || '';
return `GPT-${version}${suffix}`;
}
return '';
};
/** Match omni models (o1, o3, etc.), "o" followed by a digit, possibly with decimal */
const extractOmniVersion = (modelStr: string): string => {
const omniMatch = modelStr.match(/\bo(\d+(?:\.\d+)?)\b/i);
if (omniMatch) {
const version = omniMatch[1];
return `o${version}`;
}
return '';
};
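
These helpers are module-local; for illustration, a few inputs and the labels they produce:

extractGPTVersion('gpt-4o-2024-08-06'); // 'GPT-4o'
extractGPTVersion('gpt-4.5-preview');   // 'GPT-4.5'
extractGPTVersion('gpt-3.5-turbo');     // 'GPT-3.5'
extractOmniVersion('o3-mini');          // 'o3'
extractOmniVersion('o1-preview');       // 'o1'
extractOmniVersion('gpt-4o');           // '' ("o" must be a standalone token, so gpt-4o is not an omni match)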
export const getResponseSender = (endpointOption: t.TEndpointOption): string => {
const {
model: _m,
@ -238,18 +250,13 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
return chatGptLabel;
} else if (modelLabel) {
return modelLabel;
} else if (model && /\bo1\b/i.test(model)) {
return 'o1';
} else if (model && /\bo3\b/i.test(model)) {
return 'o3';
} else if (model && model.includes('gpt-3')) {
return 'GPT-3.5';
} else if (model && model.includes('gpt-4o')) {
return 'GPT-4o';
} else if (model && model.includes('gpt-4')) {
return 'GPT-4';
} else if (model && extractOmniVersion(model)) {
return extractOmniVersion(model);
} else if (model && model.includes('mistral')) {
return 'Mistral';
} else if (model && model.includes('gpt-')) {
const gptVersion = extractGPTVersion(model);
return gptVersion || 'GPT';
}
return (alternateName[endpoint] as string | undefined) ?? 'ChatGPT';
}
@ -279,14 +286,13 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
return modelLabel;
} else if (chatGptLabel) {
return chatGptLabel;
} else if (model && extractOmniVersion(model)) {
return extractOmniVersion(model);
} else if (model && model.includes('mistral')) {
return 'Mistral';
} else if (model && model.includes('gpt-3')) {
return 'GPT-3.5';
} else if (model && model.includes('gpt-4o')) {
return 'GPT-4o';
} else if (model && model.includes('gpt-4')) {
return 'GPT-4';
} else if (model && model.includes('gpt-')) {
const gptVersion = extractGPTVersion(model);
return gptVersion || 'GPT';
} else if (modelDisplayLabel) {
return modelDisplayLabel;
}

View file

@ -91,6 +91,9 @@ axios.interceptors.response.use(
return Promise.reject(error);
}
if (originalRequest.url?.includes('/api/auth/2fa') === true) {
return Promise.reject(error);
}
if (originalRequest.url?.includes('/api/auth/logout') === true) {
return Promise.reject(error);
}


@ -34,6 +34,14 @@ export enum PermissionTypes {
* Type for Multi-Conversation Permissions
*/
MULTI_CONVO = 'MULTI_CONVO',
/**
* Type for Temporary Chat
*/
TEMPORARY_CHAT = 'TEMPORARY_CHAT',
/**
* Type for using the "Run Code" LC Code Interpreter API feature
*/
RUN_CODE = 'RUN_CODE',
}
/**
@ -68,7 +76,15 @@ export const agentPermissionsSchema = z.object({
});
export const multiConvoPermissionsSchema = z.object({
[Permissions.USE]: z.boolean().default(false),
[Permissions.USE]: z.boolean().default(true),
});
export const temporaryChatPermissionsSchema = z.object({
[Permissions.USE]: z.boolean().default(true),
});
export const runCodePermissionsSchema = z.object({
[Permissions.USE]: z.boolean().default(true),
});
export const roleSchema = z.object({
@ -77,6 +93,8 @@ export const roleSchema = z.object({
[PermissionTypes.BOOKMARKS]: bookmarkPermissionsSchema,
[PermissionTypes.AGENTS]: agentPermissionsSchema,
[PermissionTypes.MULTI_CONVO]: multiConvoPermissionsSchema,
[PermissionTypes.TEMPORARY_CHAT]: temporaryChatPermissionsSchema,
[PermissionTypes.RUN_CODE]: runCodePermissionsSchema,
});
export type TRole = z.infer<typeof roleSchema>;
@ -84,6 +102,8 @@ export type TAgentPermissions = z.infer<typeof agentPermissionsSchema>;
export type TPromptPermissions = z.infer<typeof promptPermissionsSchema>;
export type TBookmarkPermissions = z.infer<typeof bookmarkPermissionsSchema>;
export type TMultiConvoPermissions = z.infer<typeof multiConvoPermissionsSchema>;
export type TTemporaryChatPermissions = z.infer<typeof temporaryChatPermissionsSchema>;
export type TRunCodePermissions = z.infer<typeof runCodePermissionsSchema>;
const defaultRolesSchema = z.object({
[SystemRoles.ADMIN]: roleSchema.extend({
@ -106,6 +126,12 @@ const defaultRolesSchema = z.object({
[PermissionTypes.MULTI_CONVO]: multiConvoPermissionsSchema.extend({
[Permissions.USE]: z.boolean().default(true),
}),
[PermissionTypes.TEMPORARY_CHAT]: temporaryChatPermissionsSchema.extend({
[Permissions.USE]: z.boolean().default(true),
}),
[PermissionTypes.RUN_CODE]: runCodePermissionsSchema.extend({
[Permissions.USE]: z.boolean().default(true),
}),
}),
[SystemRoles.USER]: roleSchema.extend({
name: z.literal(SystemRoles.USER),
@ -113,6 +139,8 @@ const defaultRolesSchema = z.object({
[PermissionTypes.BOOKMARKS]: bookmarkPermissionsSchema,
[PermissionTypes.AGENTS]: agentPermissionsSchema,
[PermissionTypes.MULTI_CONVO]: multiConvoPermissionsSchema,
[PermissionTypes.TEMPORARY_CHAT]: temporaryChatPermissionsSchema,
[PermissionTypes.RUN_CODE]: runCodePermissionsSchema,
}),
});
@ -123,6 +151,8 @@ export const roleDefaults = defaultRolesSchema.parse({
[PermissionTypes.BOOKMARKS]: {},
[PermissionTypes.AGENTS]: {},
[PermissionTypes.MULTI_CONVO]: {},
[PermissionTypes.TEMPORARY_CHAT]: {},
[PermissionTypes.RUN_CODE]: {},
},
[SystemRoles.USER]: {
name: SystemRoles.USER,
@ -130,5 +160,7 @@ export const roleDefaults = defaultRolesSchema.parse({
[PermissionTypes.BOOKMARKS]: {},
[PermissionTypes.AGENTS]: {},
[PermissionTypes.MULTI_CONVO]: {},
[PermissionTypes.TEMPORARY_CHAT]: {},
[PermissionTypes.RUN_CODE]: {},
},
});
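
A quick check of what the updated defaults parse to (property access shown for illustration; import path assumed):

import { roleDefaults, PermissionTypes, Permissions, SystemRoles } from 'librechat-data-provider';

// Both new permission types default to USE: true for the USER role,
// and MULTI_CONVO now defaults to true as well.
roleDefaults[SystemRoles.USER][PermissionTypes.TEMPORARY_CHAT][Permissions.USE]; // true
roleDefaults[SystemRoles.USER][PermissionTypes.RUN_CODE][Permissions.USE];       // true
roleDefaults[SystemRoles.USER][PermissionTypes.MULTI_CONVO][Permissions.USE];    // true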


@ -179,34 +179,34 @@ export const isImageVisionTool = (tool: FunctionTool | FunctionToolCall) =>
export const openAISettings = {
model: {
default: 'gpt-4o',
default: 'gpt-4o-mini' as const,
},
temperature: {
min: 0,
max: 2,
step: 0.01,
default: 1,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 1 as const,
},
top_p: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
presence_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
frequency_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
resendFiles: {
default: true,
default: true as const,
},
maxContextTokens: {
default: undefined,
@ -215,72 +215,85 @@ export const openAISettings = {
default: undefined,
},
imageDetail: {
default: ImageDetail.auto,
min: 0,
max: 2,
step: 1,
default: ImageDetail.auto as const,
min: 0 as const,
max: 2 as const,
step: 1 as const,
},
};
export const googleSettings = {
model: {
default: 'gemini-1.5-flash-latest',
default: 'gemini-1.5-flash-latest' as const,
},
maxOutputTokens: {
min: 1,
max: 8192,
step: 1,
default: 8192,
min: 1 as const,
max: 8192 as const,
step: 1 as const,
default: 8192 as const,
},
temperature: {
min: 0,
max: 2,
step: 0.01,
default: 1,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 1 as const,
},
topP: {
min: 0,
max: 1,
step: 0.01,
default: 0.95,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 0.95 as const,
},
topK: {
min: 1,
max: 40,
step: 1,
default: 40,
min: 1 as const,
max: 40 as const,
step: 1 as const,
default: 40 as const,
},
};
const ANTHROPIC_MAX_OUTPUT = 8192;
const LEGACY_ANTHROPIC_MAX_OUTPUT = 4096;
const ANTHROPIC_MAX_OUTPUT = 128000 as const;
const DEFAULT_MAX_OUTPUT = 8192 as const;
const LEGACY_ANTHROPIC_MAX_OUTPUT = 4096 as const;
export const anthropicSettings = {
model: {
default: 'claude-3-5-sonnet-20241022',
default: 'claude-3-5-sonnet-latest' as const,
},
temperature: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
promptCache: {
default: true,
default: true as const,
},
thinking: {
default: true as const,
},
thinkingBudget: {
min: 1024 as const,
step: 100 as const,
max: 200000 as const,
default: 2000 as const,
},
maxOutputTokens: {
min: 1,
min: 1 as const,
max: ANTHROPIC_MAX_OUTPUT,
step: 1,
default: ANTHROPIC_MAX_OUTPUT,
step: 1 as const,
default: DEFAULT_MAX_OUTPUT,
reset: (modelName: string) => {
if (modelName.includes('claude-3-5-sonnet')) {
return ANTHROPIC_MAX_OUTPUT;
if (/claude-3[-.]5-sonnet/.test(modelName) || /claude-3[-.]7/.test(modelName)) {
return DEFAULT_MAX_OUTPUT;
}
return 4096;
},
set: (value: number, modelName: string) => {
if (!modelName.includes('claude-3-5-sonnet') && value > LEGACY_ANTHROPIC_MAX_OUTPUT) {
if (
!(/claude-3[-.]5-sonnet/.test(modelName) || /claude-3[-.]7/.test(modelName)) &&
value > LEGACY_ANTHROPIC_MAX_OUTPUT
) {
return LEGACY_ANTHROPIC_MAX_OUTPUT;
}
@ -288,28 +301,28 @@ export const anthropicSettings = {
},
},
topP: {
min: 0,
max: 1,
step: 0.01,
default: 0.7,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 0.7 as const,
},
topK: {
min: 1,
max: 40,
step: 1,
default: 5,
min: 1 as const,
max: 40 as const,
step: 1 as const,
default: 5 as const,
},
resendFiles: {
default: true,
default: true as const,
},
maxContextTokens: {
default: undefined,
},
legacy: {
maxOutputTokens: {
min: 1,
min: 1 as const,
max: LEGACY_ANTHROPIC_MAX_OUTPUT,
step: 1,
step: 1 as const,
default: LEGACY_ANTHROPIC_MAX_OUTPUT,
},
},
@ -317,34 +330,34 @@ export const anthropicSettings = {
export const agentsSettings = {
model: {
default: 'gpt-3.5-turbo-test',
default: 'gpt-3.5-turbo-test' as const,
},
temperature: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
top_p: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
presence_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
frequency_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
resendFiles: {
default: true,
default: true as const,
},
maxContextTokens: {
default: undefined,
@ -353,7 +366,7 @@ export const agentsSettings = {
default: undefined,
},
imageDetail: {
default: ImageDetail.auto,
default: ImageDetail.auto as const,
},
};
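
Illustrative behavior of the revised maxOutputTokens helpers (model names taken from the lists above; the unchanged fall-through of set() is assumed to return the value as-is):

// Claude 3.5 Sonnet and 3.7 models keep the 8192 default; older models fall back to 4096.
anthropicSettings.maxOutputTokens.reset('claude-3-7-sonnet-20250219'); // 8192 (DEFAULT_MAX_OUTPUT)
anthropicSettings.maxOutputTokens.reset('claude-3-5-haiku-20241022');  // 4096

// set() clamps values above the legacy cap for models outside the 3.5-sonnet / 3.7 families.
anthropicSettings.maxOutputTokens.set(8192, 'claude-3-opus-20240229');     // 4096 (LEGACY_ANTHROPIC_MAX_OUTPUT)
anthropicSettings.maxOutputTokens.set(8192, 'claude-3-7-sonnet-20250219'); // 8192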
@ -560,6 +573,8 @@ export const tConversationSchema = z.object({
/* Anthropic */
promptCache: z.boolean().optional(),
system: z.string().optional(),
thinking: z.boolean().optional(),
thinkingBudget: coerceNumber.optional(),
/* artifacts */
artifacts: z.string().optional(),
/* google */
@ -676,6 +691,8 @@ export const tQueryParamsSchema = tConversationSchema
maxOutputTokens: true,
/** @endpoints anthropic */
promptCache: true,
thinking: true,
thinkingBudget: true,
/** @endpoints bedrock */
region: true,
/** @endpoints bedrock */
@ -751,37 +768,8 @@ export const googleSchema = tConversationSchema
spec: true,
maxContextTokens: true,
})
.transform((obj) => {
return {
...obj,
model: obj.model ?? google.model.default,
modelLabel: obj.modelLabel ?? null,
promptPrefix: obj.promptPrefix ?? null,
examples: obj.examples ?? [{ input: { content: '' }, output: { content: '' } }],
temperature: obj.temperature ?? google.temperature.default,
maxOutputTokens: obj.maxOutputTokens ?? google.maxOutputTokens.default,
topP: obj.topP ?? google.topP.default,
topK: obj.topK ?? google.topK.default,
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
maxContextTokens: obj.maxContextTokens ?? undefined,
};
})
.catch(() => ({
model: google.model.default,
modelLabel: null,
promptPrefix: null,
examples: [{ input: { content: '' }, output: { content: '' } }],
temperature: google.temperature.default,
maxOutputTokens: google.maxOutputTokens.default,
topP: google.topP.default,
topK: google.topK.default,
iconURL: undefined,
greeting: undefined,
spec: undefined,
maxContextTokens: undefined,
}));
.transform((obj: Partial<TConversation>) => removeNullishValues(obj))
.catch(() => ({}));
/**
* TODO: Map the following fields:
@ -1071,6 +1059,8 @@ export const anthropicSchema = tConversationSchema
topK: true,
resendFiles: true,
promptCache: true,
thinking: true,
thinkingBudget: true,
artifacts: true,
iconURL: true,
greeting: true,


@ -100,6 +100,12 @@ export type TError = {
};
};
export type TBackupCode = {
codeHash: string;
used: boolean;
usedAt: Date | null;
};
export type TUser = {
id: string;
username: string;
@ -110,6 +116,7 @@ export type TUser = {
provider: string;
plugins?: string[];
decryptedPrivateKey?: CryptoKey | string;
backupCodes?: TBackupCode[];
createdAt: string;
updatedAt: string;
};
@ -286,11 +293,61 @@ export type TRegisterUser = {
export type TLoginUser = {
email: string;
password: string;
token?: string;
backupCode?: string;
};
export type TLoginResponse = {
token: string;
user: TUser;
token?: string;
user?: TUser;
twoFAPending?: boolean;
tempToken?: string;
};
export type TEnable2FAResponse = {
otpauthUrl: string;
backupCodes: string[];
message?: string;
};
export type TVerify2FARequest = {
token?: string;
backupCode?: string;
};
export type TVerify2FAResponse = {
message: string;
};
/**
* For verifying 2FA during login with a temporary token.
*/
export type TVerify2FATempRequest = {
tempToken: string;
token?: string;
backupCode?: string;
};
export type TVerify2FATempResponse = {
token?: string;
user?: TUser;
message?: string;
};
/**
* Response from disabling 2FA.
*/
export type TDisable2FAResponse = {
message: string;
};
/**
* Response from regenerating backup codes.
*/
export type TRegenerateBackupCodesResponse = {
message: string;
backupCodes: string[];
backupCodesHash: string[];
};
export type TRequestPasswordReset = {


@ -0,0 +1,44 @@
export const envVarRegex = /^\${(.+)}$/;
/** Resolves environment variable references (`${VAR_NAME}`) in a string, supporting one or more variables. */
export function extractEnvVariable(value: string) {
if (!value) {
return value;
}
// Trim the input
const trimmed = value.trim();
// Special case: if it's just a single environment variable
const singleMatch = trimmed.match(envVarRegex);
if (singleMatch) {
const varName = singleMatch[1];
return process.env[varName] || trimmed;
}
// For multiple variables, process them using a regex loop
const regex = /\${([^}]+)}/g;
let result = trimmed;
// First collect all matches and their positions
const matches = [];
let match;
while ((match = regex.exec(trimmed)) !== null) {
matches.push({
fullMatch: match[0],
varName: match[1],
index: match.index,
});
}
// Process matches in reverse order to avoid position shifts
for (let i = matches.length - 1; i >= 0; i--) {
const { fullMatch, varName, index } = matches[i];
const envValue = process.env[varName] || fullMatch;
// Replace at exact position
result = result.substring(0, index) + envValue + result.substring(index + fullMatch.length);
}
return result;
}
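
Behavior of the rewritten helper for single and embedded references (env values illustrative):

process.env.HOST = 'api.example.com';
process.env.PORT = '8443';

extractEnvVariable('${HOST}');                 // 'api.example.com' (single-variable fast path)
extractEnvVariable('https://${HOST}:${PORT}'); // 'https://api.example.com:8443'
extractEnvVariable('${MISSING}/v1');           // '${MISSING}/v1' (unset variables are left as written)
extractEnvVariable('plain-value');             // 'plain-value'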