Merge branch 'dev' into feat/multi-lang-Terms-of-service
Commit 126b1fe412
323 changed files with 20207 additions and 4039 deletions

79 .env.example
@@ -88,7 +88,7 @@ PROXY=
#============#
ANTHROPIC_API_KEY=user_provided
# ANTHROPIC_MODELS=claude-3-7-sonnet-latest,claude-3-7-sonnet-20250219,claude-3-5-haiku-20241022,claude-3-5-sonnet-20241022,claude-3-5-sonnet-latest,claude-3-5-sonnet-20240620,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
# ANTHROPIC_MODELS=claude-opus-4-20250514,claude-sonnet-4-20250514,claude-3-7-sonnet-20250219,claude-3-5-sonnet-20241022,claude-3-5-haiku-20241022,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307
# ANTHROPIC_REVERSE_PROXY=

#============#

@@ -443,6 +443,47 @@ OPENID_IMAGE_URL=
# Set to true to automatically redirect to the OpenID provider when a user visits the login page
# This will bypass the login form completely for users; only use this if OpenID is your only authentication method
OPENID_AUTO_REDIRECT=false
# Set to true to use PKCE (Proof Key for Code Exchange) for OpenID authentication
OPENID_USE_PKCE=false
# Set to true to reuse OpenID tokens for authentication management instead of the MongoDB session and the custom refresh token.
OPENID_REUSE_TOKENS=
# By default, signing key verification results are cached in order to prevent excessive HTTP requests to the JWKS endpoint.
# If a signing key matching the kid is found, it will be cached, and the next time that kid is requested the signing key will be served from the cache.
# Default is true.
OPENID_JWKS_URL_CACHE_ENABLED=
OPENID_JWKS_URL_CACHE_TIME= # e.g., 600000 ms (10 minutes); leave empty to disable caching
# Set to true to trigger the token exchange flow to acquire an access token for the userinfo endpoint.
OPENID_ON_BEHALF_FLOW_FOR_USERINFRO_REQUIRED=
OPENID_ON_BEHALF_FLOW_USERINFRO_SCOPE = "user.read" # example scope needed for the Microsoft Graph API
# Set to true to use the OpenID Connect end session endpoint for logout
OPENID_USE_END_SESSION_ENDPOINT=

# SAML
# Note: If OpenID is enabled, SAML authentication will be automatically disabled.
SAML_ENTRY_POINT=
SAML_ISSUER=
SAML_CERT=
SAML_CALLBACK_URL=/oauth/saml/callback
SAML_SESSION_SECRET=

# Attribute mappings (optional)
SAML_EMAIL_CLAIM=
SAML_USERNAME_CLAIM=
SAML_GIVEN_NAME_CLAIM=
SAML_FAMILY_NAME_CLAIM=
SAML_PICTURE_CLAIM=
SAML_NAME_CLAIM=

# Login button settings (optional)
SAML_BUTTON_LABEL=
SAML_IMAGE_URL=

# Whether the SAML Response should be signed.
# - If "true", the entire `SAML Response` will be signed.
# - If "false" or unset, only the `SAML Assertion` will be signed (default behavior).
# SAML_USE_AUTHN_RESPONSE_SIGNED=
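A sketch of how that flag could map onto a SAML strategy, assuming `@node-saml/passport-saml` (the option names below are that library's; this commit does not confirm the exact wiring):

const { Strategy } = require('@node-saml/passport-saml');

const samlStrategy = new Strategy(
  {
    entryPoint: process.env.SAML_ENTRY_POINT,
    issuer: process.env.SAML_ISSUER,
    callbackUrl: process.env.SAML_CALLBACK_URL,
    cert: process.env.SAML_CERT,
    // true: require the entire SAML Response to be signed; otherwise only the Assertion.
    wantAuthnResponseSigned: process.env.SAML_USE_AUTHN_RESPONSE_SIGNED === 'true',
  },
  (profile, done) => done(null, profile), // sign-on verify
  (profile, done) => done(null, profile), // logout verify
);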

# LDAP
LDAP_URL=

@@ -563,9 +604,9 @@ HELP_AND_FAQ_URL=https://librechat.ai
# users always get the latest version. Customize #
# only if you understand caching implications. #

# INDEX_HTML_CACHE_CONTROL=no-cache, no-store, must-revalidate
# INDEX_HTML_PRAGMA=no-cache
# INDEX_HTML_EXPIRES=0
# INDEX_CACHE_CONTROL=no-cache, no-store, must-revalidate
# INDEX_PRAGMA=no-cache
# INDEX_EXPIRES=0

# no-cache: Forces validation with the server before using the cached version
# no-store: Prevents storing the response entirely
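These values become response headers on the index route; a minimal sketch of how an Express app might apply them (hypothetical wiring, not the code from this commit):

const express = require('express');
const app = express();

app.get('/', (req, res, next) => {
  // Fall back to the defaults suggested above when the env vars are unset.
  res.set({
    'Cache-Control': process.env.INDEX_HTML_CACHE_CONTROL || 'no-cache, no-store, must-revalidate',
    Pragma: process.env.INDEX_HTML_PRAGMA || 'no-cache',
    Expires: process.env.INDEX_HTML_EXPIRES || '0',
  });
  next(); // hand off to the static index.html handler
});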

@@ -575,3 +616,33 @@ HELP_AND_FAQ_URL=https://librechat.ai
# OpenWeather #
#=====================================================#
OPENWEATHER_API_KEY=

#====================================#
# LibreChat Code Interpreter API #
#====================================#

# https://code.librechat.ai
# LIBRECHAT_CODE_API_KEY=your-key

#======================#
# Web Search #
#======================#

# Note: All of the following variable names can be customized.
# Omit values to allow the user to provide them.

# For more information on configuration values, see:
# https://librechat.ai/docs/features/web_search

# Search Provider (Required)
# SERPER_API_KEY=your_serper_api_key

# Scraper (Required)
# FIRECRAWL_API_KEY=your_firecrawl_api_key
# Optional: Custom Firecrawl API URL
# FIRECRAWL_API_URL=your_firecrawl_api_url

# Reranker (Required)
# JINA_API_KEY=your_jina_api_key
# or
# COHERE_API_KEY=your_cohere_api_key
7 .github/workflows/helmcharts.yml

@@ -26,8 +26,15 @@ jobs:
        uses: azure/setup-helm@v4
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
      - name: Build Subchart Deps
        run: |
          cd helm/librechat-rag-api
          helm dependency build

      - name: Run chart-releaser
        uses: helm/chart-releaser-action@v1.6.0
        with:
          charts_dir: helm
          skip_existing: true
        env:
          CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
2 .github/workflows/i18n-unused-keys.yml

@@ -22,7 +22,7 @@ jobs:

# Define paths
I18N_FILE="client/src/locales/en/translation.json"
SOURCE_DIRS=("client/src" "api")
SOURCE_DIRS=("client/src" "api" "packages/data-provider/src")

# Check if translation file exists
if [[ ! -f "$I18N_FILE" ]]; then
12 .gitignore

@@ -52,8 +52,9 @@ bower_components/
*.d.ts
!vite-env.d.ts

# Cline
# AI
.clineignore
.cursor

# Floobits
.floo

@@ -113,4 +114,13 @@ uploads/

# owner
release/

# Helm
helm/librechat/Chart.lock
helm/**/charts/
helm/**/.values.yaml

!/client/src/@types/i18next.d.ts

# SAML IdP cert
*.cert
25 CHANGELOG.md

@@ -5,23 +5,47 @@ All notable changes to this project will be documented in this file.

## [Unreleased]

### ✨ New Features

- ✨ feat: implement search parameter updates by **@mawburn** in [#7151](https://github.com/danny-avila/LibreChat/pull/7151)
- 🎏 feat: Add MCP support for Streamable HTTP Transport by **@benverhees** in [#7353](https://github.com/danny-avila/LibreChat/pull/7353)
- 🔒 feat: Add Content Security Policy using Helmet middleware by **@rubentalstra** in [#7377](https://github.com/danny-avila/LibreChat/pull/7377)
- ✨ feat: Add Normalization for MCP Server Names by **@danny-avila** in [#7421](https://github.com/danny-avila/LibreChat/pull/7421)
- 📊 feat: Improve Helm Chart by **@hofq** in [#3638](https://github.com/danny-avila/LibreChat/pull/3638)
- 🦾 feat: Claude-4 Support by **@danny-avila** in [#7509](https://github.com/danny-avila/LibreChat/pull/7509)
- 🪨 feat: Bedrock Support for Claude-4 Reasoning by **@danny-avila** in [#7517](https://github.com/danny-avila/LibreChat/pull/7517)

### 🌍 Internationalization

- 🌍 i18n: Add `Danish` and `Czech` and `Catalan` localization support by **@rubentalstra** in [#7373](https://github.com/danny-avila/LibreChat/pull/7373)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7375](https://github.com/danny-avila/LibreChat/pull/7375)
- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7468](https://github.com/danny-avila/LibreChat/pull/7468)

### 🔧 Fixes

- 💬 fix: update aria-label for accessibility in ConvoLink component by **@berry-13** in [#7320](https://github.com/danny-avila/LibreChat/pull/7320)
- 🔑 fix: use `apiKey` instead of `openAIApiKey` in OpenAI-like Config by **@danny-avila** in [#7337](https://github.com/danny-avila/LibreChat/pull/7337)
- 🔄 fix: update navigation logic in `useFocusChatEffect` to ensure correct search parameters are used by **@mawburn** in [#7340](https://github.com/danny-avila/LibreChat/pull/7340)
- 🔄 fix: Improve MCP Connection Cleanup by **@danny-avila** in [#7400](https://github.com/danny-avila/LibreChat/pull/7400)
- 🛡️ fix: Preset and Validation Logic for URL Query Params by **@danny-avila** in [#7407](https://github.com/danny-avila/LibreChat/pull/7407)
- 🌘 fix: artifact of preview text is illegible in dark mode by **@nhtruong** in [#7405](https://github.com/danny-avila/LibreChat/pull/7405)
- 🛡️ fix: Temporarily Remove CSP until Configurable by **@danny-avila** in [#7419](https://github.com/danny-avila/LibreChat/pull/7419)
- 💽 fix: Exclude index page `/` from static cache settings by **@sbruel** in [#7382](https://github.com/danny-avila/LibreChat/pull/7382)

### ⚙️ Other Changes

- 📜 docs: CHANGELOG for release v0.7.8 by **@github-actions[bot]** in [#7290](https://github.com/danny-avila/LibreChat/pull/7290)
- 📦 chore: Update API Package Dependencies by **@danny-avila** in [#7359](https://github.com/danny-avila/LibreChat/pull/7359)
- 📜 docs: Unreleased Changelog by **@github-actions[bot]** in [#7321](https://github.com/danny-avila/LibreChat/pull/7321)
- 📜 docs: Unreleased Changelog by **@github-actions[bot]** in [#7434](https://github.com/danny-avila/LibreChat/pull/7434)
- 🛡️ chore: `multer` v2.0.0 for CVE-2025-47935 and CVE-2025-47944 by **@danny-avila** in [#7454](https://github.com/danny-avila/LibreChat/pull/7454)
- 📂 refactor: Improve `FileAttachment` & File Form Deletion by **@danny-avila** in [#7471](https://github.com/danny-avila/LibreChat/pull/7471)
- 📊 chore: Remove Old Helm Chart by **@hofq** in [#7512](https://github.com/danny-avila/LibreChat/pull/7512)
- 🪖 chore: bump helm app version to v0.7.8 by **@austin-barrington** in [#7524](https://github.com/danny-avila/LibreChat/pull/7524)

@@ -67,7 +91,6 @@ Changes from v0.7.8-rc1 to v0.7.8.

---
## [v0.7.8-rc1] -
## [v0.7.8-rc1] -

Changes from v0.7.7 to v0.7.8-rc1.
@@ -71,6 +71,11 @@
- [Model Context Protocol (MCP) Support](https://modelcontextprotocol.io/clients#librechat) for Tools
- Use LibreChat Agents and OpenAI Assistants with Files, Code Interpreter, Tools, and API Actions

- 🔍 **Web Search**:
  - Search the internet and retrieve relevant information to enhance your AI context
  - Combines search providers, content scrapers, and result rerankers for optimal results
  - **[Learn More →](https://www.librechat.ai/docs/features/web_search)**

- 🪄 **Generative UI with Code Artifacts**:
  - [Code Artifacts](https://youtu.be/GfTj7O4gmd0?si=WJbdnemZpJzBrJo3) allow creation of React, HTML, and Mermaid diagrams directly in chat
@@ -70,13 +70,10 @@ class AnthropicClient extends BaseClient {
    this.message_delta;
    /** Whether the model is part of the Claude 3 Family
     * @type {boolean} */
    this.isClaude3;
    this.isClaudeLatest;
    /** Whether to use Messages API or Completions API
     * @type {boolean} */
    this.useMessages;
    /** Whether or not the model is limited to the legacy amount of output tokens
     * @type {boolean} */
    this.isLegacyOutput;
    /** Whether or not the model supports Prompt Caching
     * @type {boolean} */
    this.supportsCacheControl;

@@ -116,21 +113,25 @@
    );

    const modelMatch = matchModelName(this.modelOptions.model, EModelEndpoint.anthropic);
    this.isClaude3 = modelMatch.includes('claude-3');
    this.isLegacyOutput = !(
      /claude-3[-.]5-sonnet/.test(modelMatch) || /claude-3[-.]7/.test(modelMatch)
    this.isClaudeLatest =
      /claude-[3-9]/.test(modelMatch) || /claude-(?:sonnet|opus|haiku)-[4-9]/.test(modelMatch);
    const isLegacyOutput = !(
      /claude-3[-.]5-sonnet/.test(modelMatch) ||
      /claude-3[-.]7/.test(modelMatch) ||
      /claude-(?:sonnet|opus|haiku)-[4-9]/.test(modelMatch) ||
      /claude-[4-9]/.test(modelMatch)
    );
    this.supportsCacheControl = this.options.promptCache && checkPromptCacheSupport(modelMatch);

    if (
      this.isLegacyOutput &&
      isLegacyOutput &&
      this.modelOptions.maxOutputTokens &&
      this.modelOptions.maxOutputTokens > legacy.maxOutputTokens.default
    ) {
      this.modelOptions.maxOutputTokens = legacy.maxOutputTokens.default;
    }

    this.useMessages = this.isClaude3 || !!this.options.attachments;
    this.useMessages = this.isClaudeLatest || !!this.options.attachments;

    this.defaultVisionModel = this.options.visionModel ?? 'claude-3-sonnet-20240229';
    this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));
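The replacement patterns in this hunk accept both dated Claude 3+ names and the reordered Claude 4 names; a quick standalone check of the two regexes taken verbatim from the diff:

const isClaudeLatest = (model) =>
  /claude-[3-9]/.test(model) || /claude-(?:sonnet|opus|haiku)-[4-9]/.test(model);

console.log(isClaudeLatest('claude-3-5-sonnet-20241022')); // true
console.log(isClaudeLatest('claude-sonnet-4-20250514')); // true (family name now precedes the version)
console.log(isClaudeLatest('claude-2.1')); // false (legacy completions-era model)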

@@ -654,7 +655,10 @@
      );
    };

    if (this.modelOptions.model.includes('claude-3')) {
    if (
      /claude-[3-9]/.test(this.modelOptions.model) ||
      /claude-(?:sonnet|opus|haiku)-[4-9]/.test(this.modelOptions.model)
    ) {
      await buildMessagesPayload();
      processTokens();
      return {
@@ -15,7 +15,7 @@ describe('AnthropicClient', () => {
      {
        role: 'user',
        isCreatedByUser: true,
        text: 'What\'s up',
        text: "What's up",
        messageId: '3',
        parentMessageId: '2',
      },

@@ -170,7 +170,7 @@ describe('AnthropicClient', () => {
    client.options.modelLabel = 'Claude-2';
    const result = await client.buildMessages(messages, parentMessageId);
    const { prompt } = result;
    expect(prompt).toContain('Human\'s name: John');
    expect(prompt).toContain("Human's name: John");
    expect(prompt).toContain('You are Claude-2');
    });
  });

@@ -244,6 +244,64 @@ describe('AnthropicClient', () => {
      );
    });

  describe('Claude 4 model headers', () => {
    it('should add "prompt-caching" beta header for claude-sonnet-4 model', () => {
      const client = new AnthropicClient('test-api-key');
      const modelOptions = {
        model: 'claude-sonnet-4-20250514',
      };
      client.setOptions({ modelOptions, promptCache: true });
      const anthropicClient = client.getClient(modelOptions);
      expect(anthropicClient._options.defaultHeaders).toBeDefined();
      expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
      expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
        'prompt-caching-2024-07-31',
      );
    });

    it('should add "prompt-caching" beta header for claude-opus-4 model', () => {
      const client = new AnthropicClient('test-api-key');
      const modelOptions = {
        model: 'claude-opus-4-20250514',
      };
      client.setOptions({ modelOptions, promptCache: true });
      const anthropicClient = client.getClient(modelOptions);
      expect(anthropicClient._options.defaultHeaders).toBeDefined();
      expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
      expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
        'prompt-caching-2024-07-31',
      );
    });

    it('should add "prompt-caching" beta header for claude-4-sonnet model', () => {
      const client = new AnthropicClient('test-api-key');
      const modelOptions = {
        model: 'claude-4-sonnet-20250514',
      };
      client.setOptions({ modelOptions, promptCache: true });
      const anthropicClient = client.getClient(modelOptions);
      expect(anthropicClient._options.defaultHeaders).toBeDefined();
      expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
      expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
        'prompt-caching-2024-07-31',
      );
    });

    it('should add "prompt-caching" beta header for claude-4-opus model', () => {
      const client = new AnthropicClient('test-api-key');
      const modelOptions = {
        model: 'claude-4-opus-20250514',
      };
      client.setOptions({ modelOptions, promptCache: true });
      const anthropicClient = client.getClient(modelOptions);
      expect(anthropicClient._options.defaultHeaders).toBeDefined();
      expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
      expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
        'prompt-caching-2024-07-31',
      );
    });
  });

  it('should not add beta header for claude-3-5-sonnet-latest model', () => {
    const client = new AnthropicClient('test-api-key');
    const modelOptions = {

@@ -456,6 +514,34 @@ describe('AnthropicClient', () => {
    expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
  });

  it('should not cap maxOutputTokens for Claude 4 Sonnet models', () => {
    const client = new AnthropicClient('test-api-key');
    const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 10; // 40,960 tokens

    client.setOptions({
      modelOptions: {
        model: 'claude-sonnet-4-20250514',
        maxOutputTokens: highTokenValue,
      },
    });

    expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
  });

  it('should not cap maxOutputTokens for Claude 4 Opus models', () => {
    const client = new AnthropicClient('test-api-key');
    const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 6; // 24,576 tokens (under 32K limit)

    client.setOptions({
      modelOptions: {
        model: 'claude-opus-4-20250514',
        maxOutputTokens: highTokenValue,
      },
    });

    expect(client.modelOptions.maxOutputTokens).toBe(highTokenValue);
  });

  it('should cap maxOutputTokens for Claude 3.5 Haiku models', () => {
    const client = new AnthropicClient('test-api-key');
    const highTokenValue = anthropicSettings.legacy.maxOutputTokens.default * 2;

@@ -729,4 +815,223 @@ describe('AnthropicClient', () => {
    expect(capturedOptions).toHaveProperty('topK', 10);
    expect(capturedOptions).toHaveProperty('topP', 0.9);
  });

  describe('isClaudeLatest', () => {
    it('should set isClaudeLatest to true for claude-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-3-sonnet-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-3.5 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-3.5-sonnet-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-sonnet-4 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-sonnet-4-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-opus-4 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-opus-4-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to true for claude-3.5-haiku models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-3.5-haiku-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(true);
    });

    it('should set isClaudeLatest to false for claude-2 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-2',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-instant models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-instant',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-sonnet-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-sonnet-3-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-opus-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-opus-3-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });

    it('should set isClaudeLatest to false for claude-haiku-3 models', () => {
      const client = new AnthropicClient('test-api-key');
      client.setOptions({
        modelOptions: {
          model: 'claude-haiku-3-20240229',
        },
      });
      expect(client.isClaudeLatest).toBe(false);
    });
  });

  describe('configureReasoning', () => {
    it('should enable thinking for claude-opus-4 and claude-sonnet-4 models', async () => {
      const client = new AnthropicClient('test-api-key');
      // Create a mock async generator function
      async function* mockAsyncGenerator() {
        yield { type: 'message_start', message: { usage: {} } };
        yield { delta: { text: 'Test response' } };
        yield { type: 'message_delta', usage: {} };
      }

      // Mock createResponse to return the async generator
      jest.spyOn(client, 'createResponse').mockImplementation(() => {
        return mockAsyncGenerator();
      });

      // Test claude-opus-4
      client.setOptions({
        modelOptions: {
          model: 'claude-opus-4-20250514',
        },
        thinking: true,
        thinkingBudget: 2000,
      });

      let capturedOptions = null;
      jest.spyOn(client, 'getClient').mockImplementation((options) => {
        capturedOptions = options;
        return {};
      });

      const payload = [{ role: 'user', content: 'Test message' }];
      await client.sendCompletion(payload, {});

      expect(capturedOptions).toHaveProperty('thinking');
      expect(capturedOptions.thinking).toEqual({
        type: 'enabled',
        budget_tokens: 2000,
      });

      // Test claude-sonnet-4
      client.setOptions({
        modelOptions: {
          model: 'claude-sonnet-4-20250514',
        },
        thinking: true,
        thinkingBudget: 2000,
      });

      await client.sendCompletion(payload, {});

      expect(capturedOptions).toHaveProperty('thinking');
      expect(capturedOptions.thinking).toEqual({
        type: 'enabled',
        budget_tokens: 2000,
      });
    });
  });
});

describe('Claude Model Tests', () => {
  it('should handle Claude 3 and 4 series models correctly', () => {
    const client = new AnthropicClient('test-key');
    // Claude 3 series models
    const claude3Models = [
      'claude-3-opus-20240229',
      'claude-3-sonnet-20240229',
      'claude-3-haiku-20240307',
      'claude-3-5-sonnet-20240620',
      'claude-3-5-haiku-20240620',
      'claude-3.5-sonnet-20240620',
      'claude-3.5-haiku-20240620',
      'claude-3.7-sonnet-20240620',
      'claude-3.7-haiku-20240620',
      'anthropic/claude-3-opus-20240229',
      'claude-3-opus-20240229/anthropic',
    ];

    // Claude 4 series models
    const claude4Models = [
      'claude-sonnet-4-20250514',
      'claude-opus-4-20250514',
      'claude-4-sonnet-20250514',
      'claude-4-opus-20250514',
      'anthropic/claude-sonnet-4-20250514',
      'claude-sonnet-4-20250514/anthropic',
    ];

    // Test Claude 3 series
    claude3Models.forEach((model) => {
      client.setOptions({ modelOptions: { model } });
      expect(
        /claude-[3-9]/.test(client.modelOptions.model) ||
          /claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
      ).toBe(true);
    });

    // Test Claude 4 series
    claude4Models.forEach((model) => {
      client.setOptions({ modelOptions: { model } });
      expect(
        /claude-[3-9]/.test(client.modelOptions.model) ||
          /claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
      ).toBe(true);
    });

    // Test non-Claude 3/4 models
    const nonClaudeModels = ['claude-2', 'claude-instant', 'gpt-4', 'gpt-3.5-turbo'];

    nonClaudeModels.forEach((model) => {
      client.setOptions({ modelOptions: { model } });
      expect(
        /claude-[3-9]/.test(client.modelOptions.model) ||
          /claude-(?:sonnet|opus|haiku)-[4-9]/.test(client.modelOptions.model),
      ).toBe(false);
    });
  });
});

@@ -30,7 +30,7 @@ const DEFAULT_IMAGE_EDIT_DESCRIPTION =

When to use \`image_edit_oai\`:
- The user wants to modify, extend, or remix one **or more** uploaded images, either:
- Previously generated, or in the current request (both to be included in the \`image_ids\` array).
- Previously generated, or in the current request (both to be included in the \`image_ids\` array).
- Always when the user refers to uploaded images for editing, enhancement, remixing, style transfer, or combining elements.
- Any current or existing images are to be used as visual guides.
- If there are any files in the current request, they are more likely than not expected as references for image edit requests.

@@ -1,7 +1,13 @@
const { SerpAPI } = require('@langchain/community/tools/serpapi');
const { Calculator } = require('@langchain/community/tools/calculator');
const { createCodeExecutionTool, EnvVar } = require('@librechat/agents');
const { Tools, Constants, EToolResources } = require('librechat-data-provider');
const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents');
const {
  Tools,
  Constants,
  EToolResources,
  loadWebSearchAuth,
  replaceSpecialVars,
} = require('librechat-data-provider');
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
const {
  availableTools,

@@ -138,7 +144,6 @@ const loadTools = async ({
  agent,
  model,
  endpoint,
  useSpecs,
  tools = [],
  options = {},
  functions = true,

@@ -263,6 +268,33 @@ const loadTools = async ({
        return createFileSearchTool({ req: options.req, files, entity_id: agent?.id });
      };
      continue;
    } else if (tool === Tools.web_search) {
      const webSearchConfig = options?.req?.app?.locals?.webSearch;
      const result = await loadWebSearchAuth({
        userId: user,
        loadAuthValues,
        webSearchConfig,
      });
      const { onSearchResults, onGetHighlights } = options?.[Tools.web_search] ?? {};
      requestedTools[tool] = async () => {
        toolContextMap[tool] = `# \`${tool}\`:
Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
1. **Execute immediately without preface** when using \`${tool}\`.
2. **After the search, begin with a brief summary** that directly addresses the query without headers or explaining your process.
3. **Structure your response clearly** using Markdown formatting (Level 2 headers for sections, lists for multiple points, tables for comparisons).
4. **Cite sources properly** according to the citation anchor format, utilizing group anchors when appropriate.
5. **Tailor your approach to the query type** (academic, news, coding, etc.) while maintaining an expert, journalistic, unbiased tone.
6. **Provide comprehensive information** with specific details, examples, and as much relevant context as possible from search results.
7. **Avoid moralizing language.**
`.trim();
        return createSearchTool({
          ...result.authResult,
          onSearchResults,
          onGetHighlights,
          logger,
        });
      };
      continue;
    } else if (tool && appTools[tool] && mcpToolPattern.test(tool)) {
      requestedTools[tool] = async () =>
        createMCPTool({
5 api/cache/getLogStores.js

@@ -61,6 +61,10 @@ const abortKeys = isRedisEnabled
  ? new Keyv({ store: keyvRedis })
  : new Keyv({ namespace: CacheKeys.ABORT_KEYS, ttl: Time.TEN_MINUTES });

const openIdExchangedTokensCache = isRedisEnabled
  ? new Keyv({ store: keyvRedis, ttl: Time.TEN_MINUTES })
  : new Keyv({ namespace: CacheKeys.OPENID_EXCHANGED_TOKENS, ttl: Time.TEN_MINUTES });

const namespaces = {
  [CacheKeys.ROLES]: roles,
  [CacheKeys.CONFIG_STORE]: config,

@@ -98,6 +102,7 @@ const namespaces = {
  [CacheKeys.AUDIO_RUNS]: audioRuns,
  [CacheKeys.MESSAGES]: messages,
  [CacheKeys.FLOWS]: flows,
  [CacheKeys.OPENID_EXCHANGED_TOKENS]: openIdExchangedTokensCache,
};
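The fallback branch above gives the exchanged-token cache the same 10-minute TTL in memory as in Redis; a small sketch of the resulting behavior, assuming the `keyv` package API (the key and value here are hypothetical):

const Keyv = require('keyv');

const cache = new Keyv({ namespace: 'OPENID_EXCHANGED_TOKENS', ttl: 10 * 60 * 1000 });

(async () => {
  await cache.set('user123', { access_token: '...' }); // entry expires automatically after the TTL
  console.log(await cache.get('user123'));
})();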

/**
11 api/cache/keyvRedis.js

@@ -76,10 +76,13 @@ if (REDIS_URI && isEnabled(USE_REDIS)) {
    keyvRedis = new KeyvRedis(REDIS_URI, keyvOpts);
  }

  const pingInterval = setInterval(() => {
    logger.debug('KeyvRedis ping');
    keyvRedis.client.ping().catch(err => logger.error('Redis keep-alive ping failed:', err));
  }, 5 * 60 * 1000);
  const pingInterval = setInterval(
    () => {
      logger.debug('KeyvRedis ping');
      keyvRedis.client.ping().catch((err) => logger.error('Redis keep-alive ping failed:', err));
    },
    5 * 60 * 1000,
  );

  keyvRedis.on('ready', () => {
    logger.info('KeyvRedis connection ready');
@@ -11,5 +11,8 @@ module.exports = {
  moduleNameMapper: {
    '~/(.*)': '<rootDir>/$1',
    '~/data/auth.json': '<rootDir>/__mocks__/auth.mock.json',
    '^openid-client/passport$': '<rootDir>/test/__mocks__/openid-client-passport.js', // Mock for the passport strategy part
    '^openid-client$': '<rootDir>/test/__mocks__/openid-client.js',
  },
  transformIgnorePatterns: ['/node_modules/(?!(openid-client|oauth4webapi|jose)/).*/'],
};
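The two mapped mocks stand in for the ESM-only `openid-client` package during tests; this commit does not show their contents, but a hypothetical minimal stand-in could look like:

// test/__mocks__/openid-client.js (hypothetical shape, not the repo's actual mock)
module.exports = {
  discovery: jest.fn().mockResolvedValue({ issuer: 'https://idp.example.test' }),
};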

@@ -1,6 +1,7 @@
const mongoose = require('mongoose');
const crypto = require('node:crypto');
const { agentSchema } = require('@librechat/data-schemas');
const { SystemRoles, Tools } = require('librechat-data-provider');
const { SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_delimiter } =
  require('librechat-data-provider').Constants;
const { CONFIG_STORE, STARTUP_CONFIG } = require('librechat-data-provider').CacheKeys;

@@ -11,6 +12,8 @@ const {
  removeAgentFromAllProjects,
} = require('./Project');
const getLogStores = require('~/cache/getLogStores');
const { getActions } = require('./Action');
const { logger } = require('~/config');

const Agent = mongoose.model('agent', agentSchema);

@@ -21,7 +24,19 @@ const Agent = mongoose.model('agent', agentSchema);
 * @throws {Error} If the agent creation fails.
 */
const createAgent = async (agentData) => {
  return (await Agent.create(agentData)).toObject();
  const { author, ...versionData } = agentData;
  const timestamp = new Date();
  const initialAgentData = {
    ...agentData,
    versions: [
      {
        ...versionData,
        createdAt: timestamp,
        updatedAt: timestamp,
      },
    ],
  };
  return (await Agent.create(initialAgentData)).toObject();
};

/**

@@ -48,12 +63,17 @@ const loadEphemeralAgent = ({ req, agent_id, endpoint, model_parameters: _m }) =
  const { model, ...model_parameters } = _m;
  /** @type {Record<string, FunctionTool>} */
  const availableTools = req.app.locals.availableTools;
  const mcpServers = new Set(req.body.ephemeralAgent?.mcp);
  /** @type {TEphemeralAgent | null} */
  const ephemeralAgent = req.body.ephemeralAgent;
  const mcpServers = new Set(ephemeralAgent?.mcp);
  /** @type {string[]} */
  const tools = [];
  if (req.body.ephemeralAgent?.execute_code === true) {
  if (ephemeralAgent?.execute_code === true) {
    tools.push(Tools.execute_code);
  }
  if (ephemeralAgent?.web_search === true) {
    tools.push(Tools.web_search);
  }

  if (mcpServers.size > 0) {
    for (const toolName of Object.keys(availableTools)) {

@@ -103,6 +123,8 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
    return null;
  }

  agent.version = agent.versions ? agent.versions.length : 0;

  if (agent.author.toString() === req.user.id) {
    return agent;
  }

@@ -127,19 +149,207 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
  }
};

/**
 * Check if a version already exists in the versions array, excluding timestamp and author fields
 * @param {Object} updateData - The update data to compare
 * @param {Object} currentData - The current agent data
 * @param {Array} versions - The existing versions array
 * @param {string} [actionsHash] - Hash of current action metadata
 * @returns {Object|null} - The matching version if found, null otherwise
 */
const isDuplicateVersion = (updateData, currentData, versions, actionsHash = null) => {
  if (!versions || versions.length === 0) {
    return null;
  }

  const excludeFields = [
    '_id',
    'id',
    'createdAt',
    'updatedAt',
    'author',
    'updatedBy',
    'created_at',
    'updated_at',
    '__v',
    'agent_ids',
    'versions',
    'actionsHash', // Exclude actionsHash from direct comparison
  ];

  const { $push, $pull, $addToSet, ...directUpdates } = updateData;

  if (Object.keys(directUpdates).length === 0 && !actionsHash) {
    return null;
  }

  const wouldBeVersion = { ...currentData, ...directUpdates };
  const lastVersion = versions[versions.length - 1];

  if (actionsHash && lastVersion.actionsHash !== actionsHash) {
    return null;
  }

  const allFields = new Set([...Object.keys(wouldBeVersion), ...Object.keys(lastVersion)]);

  const importantFields = Array.from(allFields).filter((field) => !excludeFields.includes(field));

  let isMatch = true;
  for (const field of importantFields) {
    if (!wouldBeVersion[field] && !lastVersion[field]) {
      continue;
    }

    if (Array.isArray(wouldBeVersion[field]) && Array.isArray(lastVersion[field])) {
      if (wouldBeVersion[field].length !== lastVersion[field].length) {
        isMatch = false;
        break;
      }

      // Special handling for projectIds (MongoDB ObjectIds)
      if (field === 'projectIds') {
        const wouldBeIds = wouldBeVersion[field].map((id) => id.toString()).sort();
        const versionIds = lastVersion[field].map((id) => id.toString()).sort();

        if (!wouldBeIds.every((id, i) => id === versionIds[i])) {
          isMatch = false;
          break;
        }
      }
      // Handle arrays of objects like tool_kwargs
      else if (typeof wouldBeVersion[field][0] === 'object' && wouldBeVersion[field][0] !== null) {
        const sortedWouldBe = [...wouldBeVersion[field]].map((item) => JSON.stringify(item)).sort();
        const sortedVersion = [...lastVersion[field]].map((item) => JSON.stringify(item)).sort();

        if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
          isMatch = false;
          break;
        }
      } else {
        const sortedWouldBe = [...wouldBeVersion[field]].sort();
        const sortedVersion = [...lastVersion[field]].sort();

        if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
          isMatch = false;
          break;
        }
      }
    } else if (field === 'model_parameters') {
      const wouldBeParams = wouldBeVersion[field] || {};
      const lastVersionParams = lastVersion[field] || {};
      if (JSON.stringify(wouldBeParams) !== JSON.stringify(lastVersionParams)) {
        isMatch = false;
        break;
      }
    } else if (wouldBeVersion[field] !== lastVersion[field]) {
      isMatch = false;
      break;
    }
  }

  return isMatch ? lastVersion : null;
};
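In short, an update that merely repeats the last snapshot is reported as a duplicate; an illustrative call with hypothetical data:

// The last version already matches the would-be update, so the helper returns it.
const dup = isDuplicateVersion(
  { name: 'Helper' }, // updateData
  { name: 'Helper', provider: 'openai' }, // current agent fields
  [{ name: 'Helper', provider: 'openai' }], // existing versions
);
console.log(dup !== null); // true; updateAgent below turns this into a 409 error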

/**
 * Update an agent with new data without overwriting existing
 * properties, or create a new agent if it doesn't exist.
 * When an agent is updated, a copy of the current state will be saved to the versions array.
 *
 * @param {Object} searchParameter - The search parameters to find the agent to update.
 * @param {string} searchParameter.id - The ID of the agent to update.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {Object} updateData - An object containing the properties to update.
 * @param {Object} [options] - Optional configuration object.
 * @param {string} [options.updatingUserId] - The ID of the user performing the update (used for tracking non-author updates).
 * @param {boolean} [options.forceVersion] - Force creation of a new version even if no fields changed.
 * @returns {Promise<Agent>} The updated or newly created agent document as a plain object.
 * @throws {Error} If the update would create a duplicate version
 */
const updateAgent = async (searchParameter, updateData) => {
  const options = { new: true, upsert: false };
  return Agent.findOneAndUpdate(searchParameter, updateData, options).lean();
const updateAgent = async (searchParameter, updateData, options = {}) => {
  const { updatingUserId = null, forceVersion = false } = options;
  const mongoOptions = { new: true, upsert: false };

  const currentAgent = await Agent.findOne(searchParameter);
  if (currentAgent) {
    const { __v, _id, id, versions, author, ...versionData } = currentAgent.toObject();
    const { $push, $pull, $addToSet, ...directUpdates } = updateData;

    let actionsHash = null;

    // Generate actions hash if agent has actions
    if (currentAgent.actions && currentAgent.actions.length > 0) {
      // Extract action IDs from the format "domain_action_id"
      const actionIds = currentAgent.actions
        .map((action) => {
          const parts = action.split(actionDelimiter);
          return parts[1]; // Get just the action ID part
        })
        .filter(Boolean);

      if (actionIds.length > 0) {
        try {
          const actions = await getActions(
            {
              action_id: { $in: actionIds },
            },
            true,
          ); // Include sensitive data for hash

          actionsHash = await generateActionMetadataHash(currentAgent.actions, actions);
        } catch (error) {
          logger.error('Error fetching actions for hash generation:', error);
        }
      }
    }

    const shouldCreateVersion =
      forceVersion ||
      (versions &&
        versions.length > 0 &&
        (Object.keys(directUpdates).length > 0 || $push || $pull || $addToSet));

    if (shouldCreateVersion) {
      const duplicateVersion = isDuplicateVersion(updateData, versionData, versions, actionsHash);
      if (duplicateVersion && !forceVersion) {
        const error = new Error(
          'Duplicate version: This would create a version identical to an existing one',
        );
        error.statusCode = 409;
        error.details = {
          duplicateVersion,
          versionIndex: versions.findIndex(
            (v) => JSON.stringify(duplicateVersion) === JSON.stringify(v),
          ),
        };
        throw error;
      }
    }

    const versionEntry = {
      ...versionData,
      ...directUpdates,
      updatedAt: new Date(),
    };

    // Include actions hash in version if available
    if (actionsHash) {
      versionEntry.actionsHash = actionsHash;
    }

    // Always store updatedBy field to track who made the change
    if (updatingUserId) {
      versionEntry.updatedBy = new mongoose.Types.ObjectId(updatingUserId);
    }

    if (shouldCreateVersion || forceVersion) {
      updateData.$push = {
        ...($push || {}),
        versions: versionEntry,
      };
    }
  }

  return Agent.findOneAndUpdate(searchParameter, updateData, mongoOptions).lean();
};
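A typical call with the new third argument, using the option names from the JSDoc above (the surrounding variables are hypothetical):

// Record who made the change and force a snapshot even if no fields differ from the last version.
const updated = await updateAgent(
  { id: agentId },
  { description: 'Refreshed instructions' },
  { updatingUserId: req.user.id, forceVersion: true },
);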

/**

@@ -151,7 +361,7 @@ const updateAgent = async (searchParameter, updateData) => {
 * @param {string} params.file_id
 * @returns {Promise<Agent>} The updated agent.
 */
const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
const addAgentResourceFile = async ({ req, agent_id, tool_resource, file_id }) => {
  const searchParameter = { id: agent_id };
  let agent = await getAgent(searchParameter);
  if (!agent) {

@@ -177,7 +387,9 @@ const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
    },
  };

  const updatedAgent = await updateAgent(searchParameter, updateData);
  const updatedAgent = await updateAgent(searchParameter, updateData, {
    updatingUserId: req?.user?.id,
  });
  if (updatedAgent) {
    return updatedAgent;
  } else {

@@ -341,7 +553,7 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
    delete updateQuery.author;
  }

  const updatedAgent = await updateAgent(updateQuery, updateOps);
  const updatedAgent = await updateAgent(updateQuery, updateOps, { updatingUserId: user.id });
  if (updatedAgent) {
    return updatedAgent;
  }

@@ -358,6 +570,97 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
  return await getAgent({ id: agentId });
};

/**
 * Reverts an agent to a specific version in its version history.
 * @param {Object} searchParameter - The search parameters to find the agent to revert.
 * @param {string} searchParameter.id - The ID of the agent to revert.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {number} versionIndex - The index of the version to revert to in the versions array.
 * @returns {Promise<MongoAgent>} The updated agent document after reverting.
 * @throws {Error} If the agent is not found or the specified version does not exist.
 */
const revertAgentVersion = async (searchParameter, versionIndex) => {
  const agent = await Agent.findOne(searchParameter);
  if (!agent) {
    throw new Error('Agent not found');
  }

  if (!agent.versions || !agent.versions[versionIndex]) {
    throw new Error(`Version ${versionIndex} not found`);
  }

  const revertToVersion = agent.versions[versionIndex];

  const updateData = {
    ...revertToVersion,
  };

  delete updateData._id;
  delete updateData.id;
  delete updateData.versions;
  delete updateData.author;
  delete updateData.updatedBy;

  return Agent.findOneAndUpdate(searchParameter, updateData, { new: true }).lean();
};

/**
 * Generates a hash of action metadata for version comparison
 * @param {string[]} actionIds - Array of action IDs in format "domain_action_id"
 * @param {Action[]} actions - Array of action documents
 * @returns {Promise<string>} - SHA256 hash of the action metadata
 */
const generateActionMetadataHash = async (actionIds, actions) => {
  if (!actionIds || actionIds.length === 0) {
    return '';
  }

  // Create a map of action_id to metadata for quick lookup
  const actionMap = new Map();
  actions.forEach((action) => {
    actionMap.set(action.action_id, action.metadata);
  });

  // Sort action IDs for consistent hashing
  const sortedActionIds = [...actionIds].sort();

  // Build a deterministic string representation of all action metadata
  const metadataString = sortedActionIds
    .map((actionFullId) => {
      // Extract just the action_id part (after the delimiter)
      const parts = actionFullId.split(actionDelimiter);
      const actionId = parts[1];

      const metadata = actionMap.get(actionId);
      if (!metadata) {
        return `${actionId}:null`;
      }

      // Sort metadata keys for deterministic output
      const sortedKeys = Object.keys(metadata).sort();
      const metadataStr = sortedKeys
        .map((key) => `${key}:${JSON.stringify(metadata[key])}`)
        .join(',');
      return `${actionId}:{${metadataStr}}`;
    })
    .join(';');

  // Use Web Crypto API to generate hash
  const encoder = new TextEncoder();
  const data = encoder.encode(metadataString);
  const hashBuffer = await crypto.webcrypto.subtle.digest('SHA-256', data);
  const hashArray = Array.from(new Uint8Array(hashBuffer));
  const hashHex = hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');

  return hashHex;
};

/**
 * Load a default agent based on the endpoint
 * @param {string} endpoint
 * @returns {Agent | null}
 */

module.exports = {
  Agent,
  getAgent,

@@ -366,7 +669,9 @@ module.exports = {
  updateAgent,
  deleteAgent,
  getListAgents,
  revertAgentVersion,
  updateAgentProjects,
  addAgentResourceFile,
  removeAgentResourceFiles,
  generateActionMetadataHash,
};
@@ -1,7 +1,25 @@
const originalEnv = {
  CREDS_KEY: process.env.CREDS_KEY,
  CREDS_IV: process.env.CREDS_IV,
};

process.env.CREDS_KEY = '0123456789abcdef0123456789abcdef';
process.env.CREDS_IV = '0123456789abcdef';

const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { Agent, addAgentResourceFile, removeAgentResourceFiles } = require('./Agent');
const {
  Agent,
  addAgentResourceFile,
  removeAgentResourceFiles,
  createAgent,
  updateAgent,
  getAgent,
  deleteAgent,
  getListAgents,
  updateAgentProjects,
} = require('./Agent');

describe('Agent Resource File Operations', () => {
  let mongoServer;

@@ -15,6 +33,8 @@ describe('Agent Resource File Operations', () => {
  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
    process.env.CREDS_KEY = originalEnv.CREDS_KEY;
    process.env.CREDS_IV = originalEnv.CREDS_IV;
  });

  beforeEach(async () => {

@@ -332,3 +352,725 @@ describe('Agent Resource File Operations', () => {
    expect(finalFileIds).toHaveLength(0);
  });
});

describe('Agent CRUD Operations', () => {
  let mongoServer;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    await Agent.deleteMany({});
  });

  test('should create and get an agent', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    const newAgent = await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      description: 'Test description',
    });

    expect(newAgent).toBeDefined();
    expect(newAgent.id).toBe(agentId);
    expect(newAgent.name).toBe('Test Agent');

    const retrievedAgent = await getAgent({ id: agentId });
    expect(retrievedAgent).toBeDefined();
    expect(retrievedAgent.id).toBe(agentId);
    expect(retrievedAgent.name).toBe('Test Agent');
    expect(retrievedAgent.description).toBe('Test description');
  });

  test('should delete an agent', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Agent To Delete',
      provider: 'test',
      model: 'test-model',
      author: authorId,
    });

    const agentBeforeDelete = await getAgent({ id: agentId });
    expect(agentBeforeDelete).toBeDefined();

    await deleteAgent({ id: agentId });

    const agentAfterDelete = await getAgent({ id: agentId });
    expect(agentAfterDelete).toBeNull();
  });

  test('should list agents by author', async () => {
    const authorId = new mongoose.Types.ObjectId();
    const otherAuthorId = new mongoose.Types.ObjectId();

    const agentIds = [];
    for (let i = 0; i < 5; i++) {
      const id = `agent_${uuidv4()}`;
      agentIds.push(id);
      await createAgent({
        id,
        name: `Agent ${i}`,
        provider: 'test',
        model: 'test-model',
        author: authorId,
      });
    }

    for (let i = 0; i < 3; i++) {
      await createAgent({
        id: `other_agent_${uuidv4()}`,
        name: `Other Agent ${i}`,
        provider: 'test',
        model: 'test-model',
        author: otherAuthorId,
      });
    }

    const result = await getListAgents({ author: authorId.toString() });

    expect(result).toBeDefined();
    expect(result.data).toBeDefined();
    expect(result.data).toHaveLength(5);
    expect(result.has_more).toBe(true);

    for (const agent of result.data) {
      expect(agent.author).toBe(authorId.toString());
    }
  });

  test('should update agent projects', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();
    const projectId1 = new mongoose.Types.ObjectId();
    const projectId2 = new mongoose.Types.ObjectId();
    const projectId3 = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Project Test Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      projectIds: [projectId1],
    });

    await updateAgent(
      { id: agentId },
      { $addToSet: { projectIds: { $each: [projectId2, projectId3] } } },
    );

    await updateAgent({ id: agentId }, { $pull: { projectIds: projectId1 } });

    await updateAgent({ id: agentId }, { projectIds: [projectId2, projectId3] });

    const updatedAgent = await getAgent({ id: agentId });
    expect(updatedAgent.projectIds).toHaveLength(2);
    expect(updatedAgent.projectIds.map((id) => id.toString())).toContain(projectId2.toString());
    expect(updatedAgent.projectIds.map((id) => id.toString())).toContain(projectId3.toString());
    expect(updatedAgent.projectIds.map((id) => id.toString())).not.toContain(projectId1.toString());

    await updateAgent({ id: agentId }, { projectIds: [] });

    const emptyProjectsAgent = await getAgent({ id: agentId });
    expect(emptyProjectsAgent.projectIds).toHaveLength(0);

    const nonExistentId = `agent_${uuidv4()}`;
    await expect(
      updateAgentProjects({
        id: nonExistentId,
        projectIds: [projectId1],
      }),
    ).rejects.toThrow();
  });

  test('should handle ephemeral agent loading', async () => {
    const agentId = 'ephemeral_test';
    const endpoint = 'openai';

    const originalModule = jest.requireActual('librechat-data-provider');

    const mockDataProvider = {
      ...originalModule,
      Constants: {
        ...originalModule.Constants,
        EPHEMERAL_AGENT_ID: 'ephemeral_test',
      },
    };

    jest.doMock('librechat-data-provider', () => mockDataProvider);

    const mockReq = {
      user: { id: 'user123' },
      body: {
        promptPrefix: 'This is a test instruction',
        ephemeralAgent: {
          execute_code: true,
          mcp: ['server1', 'server2'],
        },
      },
      app: {
        locals: {
          availableTools: {
            tool__server1: {},
            tool__server2: {},
            another_tool: {},
          },
        },
      },
    };

    const params = {
      req: mockReq,
      agent_id: agentId,
      endpoint,
      model_parameters: {
        model: 'gpt-4',
        temperature: 0.7,
      },
    };

    expect(agentId).toBeDefined();
    expect(endpoint).toBeDefined();

    jest.dontMock('librechat-data-provider');
  });

  test('should handle loadAgent functionality and errors', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Test Load Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      tools: ['tool1', 'tool2'],
    });

    const agent = await getAgent({ id: agentId });

    expect(agent).toBeDefined();
    expect(agent.id).toBe(agentId);
    expect(agent.name).toBe('Test Load Agent');
    expect(agent.tools).toEqual(expect.arrayContaining(['tool1', 'tool2']));

    const mockLoadAgent = jest.fn().mockResolvedValue(agent);
    const loadedAgent = await mockLoadAgent();
    expect(loadedAgent).toBeDefined();
    expect(loadedAgent.id).toBe(agentId);

    const nonExistentId = `agent_${uuidv4()}`;
    const nonExistentAgent = await getAgent({ id: nonExistentId });
    expect(nonExistentAgent).toBeNull();

    const mockLoadAgentError = jest.fn().mockRejectedValue(new Error('No agent found with ID'));
    await expect(mockLoadAgentError()).rejects.toThrow('No agent found with ID');
  });
});

describe('Agent Version History', () => {
  let mongoServer;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    await Agent.deleteMany({});
  });

  test('should create an agent with a single entry in versions array', async () => {
    const agentId = `agent_${uuidv4()}`;
    const agent = await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });

    expect(agent.versions).toBeDefined();
    expect(Array.isArray(agent.versions)).toBe(true);
    expect(agent.versions).toHaveLength(1);
    expect(agent.versions[0].name).toBe('Test Agent');
    expect(agent.versions[0].provider).toBe('test');
    expect(agent.versions[0].model).toBe('test-model');
  });

  test('should accumulate version history across multiple updates', async () => {
    const agentId = `agent_${uuidv4()}`;
    const author = new mongoose.Types.ObjectId();
    await createAgent({
      id: agentId,
      name: 'First Name',
      provider: 'test',
      model: 'test-model',
      author,
      description: 'First description',
    });

    await updateAgent({ id: agentId }, { name: 'Second Name', description: 'Second description' });
    await updateAgent({ id: agentId }, { name: 'Third Name', model: 'new-model' });
    const finalAgent = await updateAgent({ id: agentId }, { description: 'Final description' });

    expect(finalAgent.versions).toBeDefined();
    expect(Array.isArray(finalAgent.versions)).toBe(true);
    expect(finalAgent.versions).toHaveLength(4);

    expect(finalAgent.versions[0].name).toBe('First Name');
    expect(finalAgent.versions[0].description).toBe('First description');
    expect(finalAgent.versions[0].model).toBe('test-model');

    expect(finalAgent.versions[1].name).toBe('Second Name');
    expect(finalAgent.versions[1].description).toBe('Second description');
    expect(finalAgent.versions[1].model).toBe('test-model');

    expect(finalAgent.versions[2].name).toBe('Third Name');
    expect(finalAgent.versions[2].description).toBe('Second description');
    expect(finalAgent.versions[2].model).toBe('new-model');

    expect(finalAgent.versions[3].name).toBe('Third Name');
    expect(finalAgent.versions[3].description).toBe('Final description');
    expect(finalAgent.versions[3].model).toBe('new-model');

    expect(finalAgent.name).toBe('Third Name');
    expect(finalAgent.description).toBe('Final description');
    expect(finalAgent.model).toBe('new-model');
  });

  test('should not include metadata fields in version history', async () => {
    const agentId = `agent_${uuidv4()}`;
    await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });

    const updatedAgent = await updateAgent({ id: agentId }, { description: 'New description' });

    expect(updatedAgent.versions).toHaveLength(2);
    expect(updatedAgent.versions[0]._id).toBeUndefined();
    expect(updatedAgent.versions[0].__v).toBeUndefined();
    expect(updatedAgent.versions[0].name).toBe('Test Agent');
    expect(updatedAgent.versions[0].author).toBeUndefined();

    expect(updatedAgent.versions[1]._id).toBeUndefined();
    expect(updatedAgent.versions[1].__v).toBeUndefined();
  });

  test('should not recursively include previous versions', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Test Agent',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: new mongoose.Types.ObjectId(),
|
||||
});
|
||||
|
||||
await updateAgent({ id: agentId }, { name: 'Updated Name 1' });
|
||||
await updateAgent({ id: agentId }, { name: 'Updated Name 2' });
|
||||
const finalAgent = await updateAgent({ id: agentId }, { name: 'Updated Name 3' });
|
||||
|
||||
expect(finalAgent.versions).toHaveLength(4);
|
||||
|
||||
finalAgent.versions.forEach((version) => {
|
||||
expect(version.versions).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
test('should handle MongoDB operators and field updates correctly', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const authorId = new mongoose.Types.ObjectId();
|
||||
const projectId = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'MongoDB Operator Test',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: authorId,
|
||||
tools: ['tool1'],
|
||||
});
|
||||
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{
|
||||
description: 'Updated description',
|
||||
$push: { tools: 'tool2' },
|
||||
$addToSet: { projectIds: projectId },
|
||||
},
|
||||
);
|
||||
|
||||
const firstUpdate = await getAgent({ id: agentId });
|
||||
expect(firstUpdate.description).toBe('Updated description');
|
||||
expect(firstUpdate.tools).toContain('tool1');
|
||||
expect(firstUpdate.tools).toContain('tool2');
|
||||
expect(firstUpdate.projectIds.map((id) => id.toString())).toContain(projectId.toString());
|
||||
expect(firstUpdate.versions).toHaveLength(2);
|
||||
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{
|
||||
tools: ['tool2', 'tool3'],
|
||||
},
|
||||
);
|
||||
|
||||
const secondUpdate = await getAgent({ id: agentId });
|
||||
expect(secondUpdate.tools).toHaveLength(2);
|
||||
expect(secondUpdate.tools).toContain('tool2');
|
||||
expect(secondUpdate.tools).toContain('tool3');
|
||||
expect(secondUpdate.tools).not.toContain('tool1');
|
||||
expect(secondUpdate.versions).toHaveLength(3);
|
||||
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{
|
||||
$push: { tools: 'tool3' },
|
||||
},
|
||||
);
|
||||
|
||||
const thirdUpdate = await getAgent({ id: agentId });
|
||||
const toolCount = thirdUpdate.tools.filter((t) => t === 'tool3').length;
|
||||
expect(toolCount).toBe(2);
|
||||
expect(thirdUpdate.versions).toHaveLength(4);
|
||||
});
|
||||
|
||||
test('should handle parameter objects correctly', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const authorId = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Parameters Test',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: authorId,
|
||||
model_parameters: { temperature: 0.7 },
|
||||
});
|
||||
|
||||
const updatedAgent = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ model_parameters: { temperature: 0.8 } },
|
||||
);
|
||||
|
||||
expect(updatedAgent.versions).toHaveLength(2);
|
||||
expect(updatedAgent.model_parameters.temperature).toBe(0.8);
|
||||
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{
|
||||
model_parameters: {
|
||||
temperature: 0.8,
|
||||
max_tokens: 1000,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const complexAgent = await getAgent({ id: agentId });
|
||||
expect(complexAgent.versions).toHaveLength(3);
|
||||
expect(complexAgent.model_parameters.temperature).toBe(0.8);
|
||||
expect(complexAgent.model_parameters.max_tokens).toBe(1000);
|
||||
|
||||
await updateAgent({ id: agentId }, { model_parameters: {} });
|
||||
|
||||
const emptyParamsAgent = await getAgent({ id: agentId });
|
||||
expect(emptyParamsAgent.versions).toHaveLength(4);
|
||||
expect(emptyParamsAgent.model_parameters).toEqual({});
|
||||
});
|
||||
|
||||
test('should detect duplicate versions and reject updates', async () => {
|
||||
const originalConsoleError = console.error;
|
||||
console.error = jest.fn();
|
||||
|
||||
try {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const authorId = new mongoose.Types.ObjectId();
|
||||
const projectId1 = new mongoose.Types.ObjectId();
|
||||
const projectId2 = new mongoose.Types.ObjectId();
|
||||
|
||||
const testCases = [
|
||||
{
|
||||
name: 'simple field update',
|
||||
initial: {
|
||||
name: 'Test Agent',
|
||||
description: 'Initial description',
|
||||
},
|
||||
update: { name: 'Updated Name' },
|
||||
duplicate: { name: 'Updated Name' },
|
||||
},
|
||||
{
|
||||
name: 'object field update',
|
||||
initial: {
|
||||
model_parameters: { temperature: 0.7 },
|
||||
},
|
||||
update: { model_parameters: { temperature: 0.8 } },
|
||||
duplicate: { model_parameters: { temperature: 0.8 } },
|
||||
},
|
||||
{
|
||||
name: 'array field update',
|
||||
initial: {
|
||||
tools: ['tool1', 'tool2'],
|
||||
},
|
||||
update: { tools: ['tool2', 'tool3'] },
|
||||
duplicate: { tools: ['tool2', 'tool3'] },
|
||||
},
|
||||
{
|
||||
name: 'projectIds update',
|
||||
initial: {
|
||||
projectIds: [projectId1],
|
||||
},
|
||||
update: { projectIds: [projectId1, projectId2] },
|
||||
duplicate: { projectIds: [projectId2, projectId1] },
|
||||
},
|
||||
];
|
||||
|
||||
for (const testCase of testCases) {
|
||||
const testAgentId = `agent_${uuidv4()}`;
|
||||
|
||||
await createAgent({
|
||||
id: testAgentId,
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: authorId,
|
||||
...testCase.initial,
|
||||
});
|
||||
|
||||
await updateAgent({ id: testAgentId }, testCase.update);
|
||||
|
||||
let error;
|
||||
try {
|
||||
await updateAgent({ id: testAgentId }, testCase.duplicate);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
|
||||
expect(error).toBeDefined();
|
||||
expect(error.message).toContain('Duplicate version');
|
||||
expect(error.statusCode).toBe(409);
|
||||
expect(error.details).toBeDefined();
|
||||
expect(error.details.duplicateVersion).toBeDefined();
|
||||
|
||||
const agent = await getAgent({ id: testAgentId });
|
||||
expect(agent.versions).toHaveLength(2);
|
||||
}
|
||||
} finally {
|
||||
console.error = originalConsoleError;
|
||||
}
|
||||
});
|
||||
|
||||
test('should track updatedBy when a different user updates an agent', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const originalAuthor = new mongoose.Types.ObjectId();
|
||||
const updatingUser = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Original Agent',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: originalAuthor,
|
||||
description: 'Original description',
|
||||
});
|
||||
|
||||
const updatedAgent = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ name: 'Updated Agent', description: 'Updated description' },
|
||||
{ updatingUserId: updatingUser.toString() },
|
||||
);
|
||||
|
||||
expect(updatedAgent.versions).toHaveLength(2);
|
||||
expect(updatedAgent.versions[1].updatedBy.toString()).toBe(updatingUser.toString());
|
||||
expect(updatedAgent.author.toString()).toBe(originalAuthor.toString());
|
||||
});
|
||||
|
||||
test('should include updatedBy even when the original author updates the agent', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const originalAuthor = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Original Agent',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: originalAuthor,
|
||||
description: 'Original description',
|
||||
});
|
||||
|
||||
const updatedAgent = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ name: 'Updated Agent', description: 'Updated description' },
|
||||
{ updatingUserId: originalAuthor.toString() },
|
||||
);
|
||||
|
||||
expect(updatedAgent.versions).toHaveLength(2);
|
||||
expect(updatedAgent.versions[1].updatedBy.toString()).toBe(originalAuthor.toString());
|
||||
expect(updatedAgent.author.toString()).toBe(originalAuthor.toString());
|
||||
});
|
||||
|
||||
test('should track multiple different users updating the same agent', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const originalAuthor = new mongoose.Types.ObjectId();
|
||||
const user1 = new mongoose.Types.ObjectId();
|
||||
const user2 = new mongoose.Types.ObjectId();
|
||||
const user3 = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Original Agent',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: originalAuthor,
|
||||
description: 'Original description',
|
||||
});
|
||||
|
||||
// User 1 makes an update
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{ name: 'Updated by User 1', description: 'First update' },
|
||||
{ updatingUserId: user1.toString() },
|
||||
);
|
||||
|
||||
// Original author makes an update
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{ description: 'Updated by original author' },
|
||||
{ updatingUserId: originalAuthor.toString() },
|
||||
);
|
||||
|
||||
// User 2 makes an update
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{ name: 'Updated by User 2', model: 'new-model' },
|
||||
{ updatingUserId: user2.toString() },
|
||||
);
|
||||
|
||||
// User 3 makes an update
|
||||
const finalAgent = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ description: 'Final update by User 3' },
|
||||
{ updatingUserId: user3.toString() },
|
||||
);
|
||||
|
||||
expect(finalAgent.versions).toHaveLength(5);
|
||||
expect(finalAgent.author.toString()).toBe(originalAuthor.toString());
|
||||
|
||||
// Check that each version has the correct updatedBy
|
||||
expect(finalAgent.versions[0].updatedBy).toBeUndefined(); // Initial creation has no updatedBy
|
||||
expect(finalAgent.versions[1].updatedBy.toString()).toBe(user1.toString());
|
||||
expect(finalAgent.versions[2].updatedBy.toString()).toBe(originalAuthor.toString());
|
||||
expect(finalAgent.versions[3].updatedBy.toString()).toBe(user2.toString());
|
||||
expect(finalAgent.versions[4].updatedBy.toString()).toBe(user3.toString());
|
||||
|
||||
// Verify the final state
|
||||
expect(finalAgent.name).toBe('Updated by User 2');
|
||||
expect(finalAgent.description).toBe('Final update by User 3');
|
||||
expect(finalAgent.model).toBe('new-model');
|
||||
});
|
||||
|
||||
test('should preserve original author during agent restoration', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const originalAuthor = new mongoose.Types.ObjectId();
|
||||
const updatingUser = new mongoose.Types.ObjectId();
|
||||
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Original Agent',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: originalAuthor,
|
||||
description: 'Original description',
|
||||
});
|
||||
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{ name: 'Updated Agent', description: 'Updated description' },
|
||||
{ updatingUserId: updatingUser.toString() },
|
||||
);
|
||||
|
||||
const { revertAgentVersion } = require('./Agent');
|
||||
const revertedAgent = await revertAgentVersion({ id: agentId }, 0);
|
||||
|
||||
expect(revertedAgent.author.toString()).toBe(originalAuthor.toString());
|
||||
expect(revertedAgent.name).toBe('Original Agent');
|
||||
expect(revertedAgent.description).toBe('Original description');
|
||||
});
|
||||
|
||||
test('should detect action metadata changes and force version update', async () => {
|
||||
const agentId = `agent_${uuidv4()}`;
|
||||
const authorId = new mongoose.Types.ObjectId();
|
||||
const actionId = 'testActionId123';
|
||||
|
||||
// Create agent with actions
|
||||
await createAgent({
|
||||
id: agentId,
|
||||
name: 'Agent with Actions',
|
||||
provider: 'test',
|
||||
model: 'test-model',
|
||||
author: authorId,
|
||||
actions: [`test.com_action_${actionId}`],
|
||||
tools: ['listEvents_action_test.com', 'createEvent_action_test.com'],
|
||||
});
|
||||
|
||||
// First update with forceVersion should create a version
|
||||
const firstUpdate = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
|
||||
{ updatingUserId: authorId.toString(), forceVersion: true },
|
||||
);
|
||||
|
||||
expect(firstUpdate.versions).toHaveLength(2);
|
||||
|
||||
// Second update with same data but forceVersion should still create a version
|
||||
const secondUpdate = await updateAgent(
|
||||
{ id: agentId },
|
||||
{ tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
|
||||
{ updatingUserId: authorId.toString(), forceVersion: true },
|
||||
);
|
||||
|
||||
expect(secondUpdate.versions).toHaveLength(3);
|
||||
|
||||
// Update without forceVersion and no changes should not create a version
|
||||
let error;
|
||||
try {
|
||||
await updateAgent(
|
||||
{ id: agentId },
|
||||
{ tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
|
||||
{ updatingUserId: authorId.toString(), forceVersion: false },
|
||||
);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
|
||||
expect(error).toBeDefined();
|
||||
expect(error.message).toContain('Duplicate version');
|
||||
expect(error.statusCode).toBe(409);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@@ -100,6 +100,8 @@ const tokenValues = Object.assign(
  'claude-3-5-haiku': { prompt: 0.8, completion: 4 },
  'claude-3.5-haiku': { prompt: 0.8, completion: 4 },
  'claude-3-haiku': { prompt: 0.25, completion: 1.25 },
  'claude-sonnet-4': { prompt: 3, completion: 15 },
  'claude-opus-4': { prompt: 15, completion: 75 },
  'claude-2.1': { prompt: 8, completion: 24 },
  'claude-2': { prompt: 8, completion: 24 },
  'claude-instant': { prompt: 0.8, completion: 2.4 },

@@ -162,6 +164,8 @@ const cacheTokenValues = {
  'claude-3.5-haiku': { write: 1, read: 0.08 },
  'claude-3-5-haiku': { write: 1, read: 0.08 },
  'claude-3-haiku': { write: 0.3, read: 0.03 },
  'claude-sonnet-4': { write: 3.75, read: 0.3 },
  'claude-opus-4': { write: 18.75, read: 1.5 },
};

/**
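For orientation, a minimal cost sketch built on the new entries above — assuming, as LibreChat's pricing tables conventionally do, that these multipliers are USD per one million tokens (an assumption; the unit is not restated in this hunk):

// Illustrative only, not part of this diff. Rates mirror the new
// 'claude-sonnet-4' entry; the per-1M-token unit is an assumption.
const rates = { 'claude-sonnet-4': { prompt: 3, completion: 15 } };

function estimateCostUSD(model, promptTokens, completionTokens) {
  const { prompt, completion } = rates[model];
  // e.g. prompt: (2000 / 1e6) * 3 = 0.006; completion: (500 / 1e6) * 15 = 0.0075
  return (promptTokens / 1e6) * prompt + (completionTokens / 1e6) * completion;
}

console.log(estimateCostUSD('claude-sonnet-4', 2000, 500)); // 0.0135
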
@@ -664,3 +664,97 @@ describe('Grok Model Tests - Pricing', () => {
    });
  });
});

describe('Claude Model Tests', () => {
  it('should return correct prompt and completion rates for Claude 4 models', () => {
    expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'prompt' })).toBe(
      tokenValues['claude-sonnet-4'].prompt,
    );
    expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'completion' })).toBe(
      tokenValues['claude-sonnet-4'].completion,
    );
    expect(getMultiplier({ model: 'claude-opus-4', tokenType: 'prompt' })).toBe(
      tokenValues['claude-opus-4'].prompt,
    );
    expect(getMultiplier({ model: 'claude-opus-4', tokenType: 'completion' })).toBe(
      tokenValues['claude-opus-4'].completion,
    );
  });

  it('should handle Claude 4 model name variations with different prefixes and suffixes', () => {
    const modelVariations = [
      'claude-sonnet-4',
      'claude-sonnet-4-20240229',
      'claude-sonnet-4-latest',
      'anthropic/claude-sonnet-4',
      'claude-sonnet-4/anthropic',
      'claude-sonnet-4-preview',
      'claude-sonnet-4-20240229-preview',
      'claude-opus-4',
      'claude-opus-4-20240229',
      'claude-opus-4-latest',
      'anthropic/claude-opus-4',
      'claude-opus-4/anthropic',
      'claude-opus-4-preview',
      'claude-opus-4-20240229-preview',
    ];

    modelVariations.forEach((model) => {
      const valueKey = getValueKey(model);
      const isSonnet = model.includes('sonnet');
      const expectedKey = isSonnet ? 'claude-sonnet-4' : 'claude-opus-4';

      expect(valueKey).toBe(expectedKey);
      expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(tokenValues[expectedKey].prompt);
      expect(getMultiplier({ model, tokenType: 'completion' })).toBe(
        tokenValues[expectedKey].completion,
      );
    });
  });

  it('should return correct cache rates for Claude 4 models', () => {
    expect(getCacheMultiplier({ model: 'claude-sonnet-4', cacheType: 'write' })).toBe(
      cacheTokenValues['claude-sonnet-4'].write,
    );
    expect(getCacheMultiplier({ model: 'claude-sonnet-4', cacheType: 'read' })).toBe(
      cacheTokenValues['claude-sonnet-4'].read,
    );
    expect(getCacheMultiplier({ model: 'claude-opus-4', cacheType: 'write' })).toBe(
      cacheTokenValues['claude-opus-4'].write,
    );
    expect(getCacheMultiplier({ model: 'claude-opus-4', cacheType: 'read' })).toBe(
      cacheTokenValues['claude-opus-4'].read,
    );
  });

  it('should handle Claude 4 model cache rates with different prefixes and suffixes', () => {
    const modelVariations = [
      'claude-sonnet-4',
      'claude-sonnet-4-20240229',
      'claude-sonnet-4-latest',
      'anthropic/claude-sonnet-4',
      'claude-sonnet-4/anthropic',
      'claude-sonnet-4-preview',
      'claude-sonnet-4-20240229-preview',
      'claude-opus-4',
      'claude-opus-4-20240229',
      'claude-opus-4-latest',
      'anthropic/claude-opus-4',
      'claude-opus-4/anthropic',
      'claude-opus-4-preview',
      'claude-opus-4-20240229-preview',
    ];

    modelVariations.forEach((model) => {
      const isSonnet = model.includes('sonnet');
      const expectedKey = isSonnet ? 'claude-sonnet-4' : 'claude-opus-4';

      expect(getCacheMultiplier({ model, cacheType: 'write' })).toBe(
        cacheTokenValues[expectedKey].write,
      );
      expect(getCacheMultiplier({ model, cacheType: 'read' })).toBe(
        cacheTokenValues[expectedKey].read,
      );
    });
  });
});

@@ -43,13 +43,14 @@
    "@google/generative-ai": "^0.23.0",
    "@googleapis/youtube": "^20.0.0",
    "@keyv/redis": "^4.3.3",
    "@langchain/community": "^0.3.42",
    "@langchain/core": "^0.3.55",
    "@langchain/google-genai": "^0.2.8",
    "@langchain/google-vertexai": "^0.2.8",
    "@langchain/community": "^0.3.44",
    "@langchain/core": "^0.3.57",
    "@langchain/google-genai": "^0.2.9",
    "@langchain/google-vertexai": "^0.2.9",
    "@langchain/textsplitters": "^0.1.0",
    "@librechat/agents": "^2.4.317",
    "@librechat/agents": "^2.4.37",
    "@librechat/data-schemas": "*",
    "@node-saml/passport-saml": "^5.0.0",
    "@waylaidwanderer/fetch-event-source": "^3.0.1",
    "axios": "^1.8.2",
    "bcryptjs": "^2.4.3",

@@ -75,6 +76,7 @@
    "ioredis": "^5.3.2",
    "js-yaml": "^4.1.0",
    "jsonwebtoken": "^9.0.0",
    "jwks-rsa": "^3.2.0",
    "keyv": "^5.3.2",
    "keyv-file": "^5.1.2",
    "klona": "^2.0.6",

@@ -86,13 +88,13 @@
    "mime": "^3.0.0",
    "module-alias": "^2.2.3",
    "mongoose": "^8.12.1",
    "multer": "^1.4.5-lts.1",
    "multer": "^2.0.0",
    "nanoid": "^3.3.7",
    "nodemailer": "^6.9.15",
    "ollama": "^0.5.0",
    "openai": "^4.96.2",
    "openai-chat-tokens": "^0.2.8",
    "openid-client": "^5.4.2",
    "openid-client": "^6.5.0",
    "passport": "^0.6.0",
    "passport-apple": "^2.0.2",
    "passport-discord": "^0.1.4",

@@ -16,17 +16,17 @@ const FinalizationRegistry = global.FinalizationRegistry || null;
 */
const clientRegistry = FinalizationRegistry
  ? new FinalizationRegistry((heldValue) => {
    try {
      // This will run when the client is garbage collected
      if (heldValue && heldValue.userId) {
        logger.debug(`[FinalizationRegistry] Cleaning up client for user ${heldValue.userId}`);
      } else {
        logger.debug('[FinalizationRegistry] Cleaning up client');
      try {
        // This will run when the client is garbage collected
        if (heldValue && heldValue.userId) {
          logger.debug(`[FinalizationRegistry] Cleaning up client for user ${heldValue.userId}`);
        } else {
          logger.debug('[FinalizationRegistry] Cleaning up client');
        }
      } catch (e) {
        // Ignore errors
      }
    } catch (e) {
      // Ignore errors
    }
  })
    })
  : null;

/**

@@ -134,15 +134,12 @@ function disposeClient(client) {
  if (client.message_delta) {
    client.message_delta = null;
  }
  if (client.isClaude3 !== undefined) {
    client.isClaude3 = null;
  if (client.isClaudeLatest !== undefined) {
    client.isClaudeLatest = null;
  }
  if (client.useMessages !== undefined) {
    client.useMessages = null;
  }
  if (client.isLegacyOutput !== undefined) {
    client.isLegacyOutput = null;
  }
  if (client.supportsCacheControl !== undefined) {
    client.supportsCacheControl = null;
  }

@@ -1,3 +1,4 @@
const openIdClient = require('openid-client');
const cookies = require('cookie');
const jwt = require('jsonwebtoken');
const {

@@ -5,9 +6,12 @@ const {
  resetPassword,
  setAuthTokens,
  requestPasswordReset,
  setOpenIDAuthTokens,
} = require('~/server/services/AuthService');
const { findSession, getUserById, deleteAllUserSessions } = require('~/models');
const { findSession, getUserById, deleteAllUserSessions, findUser } = require('~/models');
const { getOpenIdConfig } = require('~/strategies');
const { logger } = require('~/config');
const { isEnabled } = require('~/server/utils');

const registrationController = async (req, res) => {
  try {

@@ -55,10 +59,28 @@ const resetPasswordController = async (req, res) => {

const refreshController = async (req, res) => {
  const refreshToken = req.headers.cookie ? cookies.parse(req.headers.cookie).refreshToken : null;
  const token_provider = req.headers.cookie
    ? cookies.parse(req.headers.cookie).token_provider
    : null;
  if (!refreshToken) {
    return res.status(200).send('Refresh token not provided');
  }

  if (token_provider === 'openid' && isEnabled(process.env.OPENID_REUSE_TOKENS) === true) {
    try {
      const openIdConfig = getOpenIdConfig();
      const tokenset = await openIdClient.refreshTokenGrant(openIdConfig, refreshToken);
      const claims = tokenset.claims();
      const user = await findUser({ email: claims.email });
      if (!user) {
        return res.status(401).redirect('/login');
      }
      const token = setOpenIDAuthTokens(tokenset, res);
      return res.status(200).send({ token, user });
    } catch (error) {
      logger.error('[refreshController] OpenID token refresh error', error);
      return res.status(403).send('Invalid OpenID refresh token');
    }
  }
  try {
    const payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
    const user = await getUserById(payload.id, '-password -__v -totpSecret');

@@ -1,9 +1,24 @@
const Balance = require('~/models/Balance');

async function balanceController(req, res) {
  const { tokenCredits: balance = '' } =
    (await Balance.findOne({ user: req.user.id }, 'tokenCredits').lean()) ?? {};
  res.status(200).send('' + balance);
  const balanceData = await Balance.findOne(
    { user: req.user.id },
    '-_id tokenCredits autoRefillEnabled refillIntervalValue refillIntervalUnit lastRefill refillAmount',
  ).lean();

  if (!balanceData) {
    return res.status(404).json({ error: 'Balance not found' });
  }

  // If auto-refill is not enabled, remove auto-refill related fields from the response
  if (!balanceData.autoRefillEnabled) {
    delete balanceData.refillIntervalValue;
    delete balanceData.refillIntervalUnit;
    delete balanceData.lastRefill;
    delete balanceData.refillAmount;
  }

  res.status(200).json(balanceData);
}

module.exports = balanceController;
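The controller now returns structured JSON instead of the bare token-credit string. A hypothetical response sketch (field names come from the projection above; the values and route path are invented for illustration):

// GET /api/balance — route path assumed. With auto-refill disabled,
// the refill fields are stripped from the response:
//   { "tokenCredits": 5000, "autoRefillEnabled": false }
// With auto-refill enabled, they are included:
//   {
//     "tokenCredits": 5000,
//     "autoRefillEnabled": true,
//     "refillIntervalValue": 1,
//     "refillIntervalUnit": "days",
//     "lastRefill": "2025-01-01T00:00:00.000Z",
//     "refillAmount": 1000
//   }
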
@@ -1,4 +1,10 @@
const { FileSources } = require('librechat-data-provider');
const {
  Tools,
  Constants,
  FileSources,
  webSearchKeys,
  extractWebSearchEnvVars,
} = require('librechat-data-provider');
const {
  Balance,
  getFiles,

@@ -83,7 +89,6 @@ const deleteUserFiles = async (req) => {
const updateUserPluginsController = async (req, res) => {
  const { user } = req;
  const { pluginKey, action, auth, isEntityTool } = req.body;
  let authService;
  try {
    if (!isEntityTool) {
      const userPluginsService = await updateUserPluginsService(user, pluginKey, action);

@@ -95,32 +100,55 @@ const updateUserPluginsController = async (req, res) => {
      }
    }

    if (auth) {
      const keys = Object.keys(auth);
      const values = Object.values(auth);
      if (action === 'install' && keys.length > 0) {
        for (let i = 0; i < keys.length; i++) {
          authService = await updateUserPluginAuth(user.id, keys[i], pluginKey, values[i]);
          if (authService instanceof Error) {
            logger.error('[authService]', authService);
            const { status, message } = authService;
            res.status(status).send({ message });
          }
    if (auth == null) {
      return res.status(200).send();
    }

    let keys = Object.keys(auth);
    if (keys.length === 0 && pluginKey !== Tools.web_search) {
      return res.status(200).send();
    }
    const values = Object.values(auth);

    /** @type {number} */
    let status = 200;
    /** @type {string} */
    let message;
    /** @type {IPluginAuth | Error} */
    let authService;

    if (pluginKey === Tools.web_search) {
      /** @type {TCustomConfig['webSearch']} */
      const webSearchConfig = req.app.locals?.webSearch;
      keys = extractWebSearchEnvVars({
        keys: action === 'install' ? keys : webSearchKeys,
        config: webSearchConfig,
      });
    }

    if (action === 'install') {
      for (let i = 0; i < keys.length; i++) {
        authService = await updateUserPluginAuth(user.id, keys[i], pluginKey, values[i]);
        if (authService instanceof Error) {
          logger.error('[authService]', authService);
          ({ status, message } = authService);
        }
      }
      if (action === 'uninstall' && keys.length > 0) {
        for (let i = 0; i < keys.length; i++) {
          authService = await deleteUserPluginAuth(user.id, keys[i]);
          if (authService instanceof Error) {
            logger.error('[authService]', authService);
            const { status, message } = authService;
            res.status(status).send({ message });
          }
    } else if (action === 'uninstall') {
      for (let i = 0; i < keys.length; i++) {
        authService = await deleteUserPluginAuth(user.id, keys[i]);
        if (authService instanceof Error) {
          logger.error('[authService]', authService);
          ({ status, message } = authService);
        }
      }
    }

    res.status(200).send();
    if (status === 200) {
      return res.status(status).send();
    }

    res.status(status).send({ message });
  } catch (err) {
    logger.error('[updateUserPluginsController]', err);
    return res.status(500).json({ message: 'Something went wrong.' });

@@ -237,6 +237,30 @@ function createToolEndCallback({ req, res, artifactPromises }) {
      return;
    }

    if (output.artifact[Tools.web_search]) {
      artifactPromises.push(
        (async () => {
          const name = `${output.name}_${output.tool_call_id}_${nanoid()}`;
          const attachment = {
            name,
            type: Tools.web_search,
            messageId: metadata.run_id,
            toolCallId: output.tool_call_id,
            conversationId: metadata.thread_id,
            [Tools.web_search]: { ...output.artifact[Tools.web_search] },
          };
          if (!res.headersSent) {
            return attachment;
          }
          res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
          return attachment;
        })().catch((error) => {
          logger.error('Error processing artifact content:', error);
          return null;
        }),
      );
    }

    if (output.artifact.content) {
      /** @type {FormattedContent[]} */
      const content = output.artifact.content;

@@ -39,9 +39,6 @@ const BaseClient = require('~/app/clients/BaseClient');
const { logger, sendEvent } = require('~/config');
const { createRun } = require('./run');

/** @typedef {import('@librechat/agents').MessageContentComplex} MessageContentComplex */
/** @typedef {import('@langchain/core/runnables').RunnableConfig} RunnableConfig */

/**
 * @param {ServerRequest} req
 * @param {Agent} agent

@@ -543,7 +540,7 @@ class AgentClient extends BaseClient {
  }

  async chatCompletion({ payload, abortController = null }) {
    /** @type {Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string; streamMode: string }} */
    /** @type {Partial<GraphRunnableConfig>} */
    let config;
    /** @type {ReturnType<createRun>} */
    let run;

@@ -23,6 +23,7 @@ const { updateAction, getActions } = require('~/models/Action');
const { updateAgentProjects } = require('~/models/Agent');
const { getProjectByName } = require('~/models/Project');
const { deleteFileByFilter } = require('~/models/File');
const { revertAgentVersion } = require('~/models/Agent');
const { logger } = require('~/config');

const systemTools = {

@@ -104,11 +105,13 @@ const getAgentHandler = async (req, res) => {
    return res.status(404).json({ error: 'Agent not found' });
  }

  agent.version = agent.versions ? agent.versions.length : 0;

  if (agent.avatar && agent.avatar?.source === FileSources.s3) {
    const originalUrl = agent.avatar.filepath;
    agent.avatar.filepath = await refreshS3Url(agent.avatar);
    if (originalUrl !== agent.avatar.filepath) {
      await updateAgent({ id }, { avatar: agent.avatar });
      await updateAgent({ id }, { avatar: agent.avatar }, { updatingUserId: req.user.id });
    }
  }

@@ -127,6 +130,7 @@ const getAgentHandler = async (req, res) => {
      author: agent.author,
      projectIds: agent.projectIds,
      isCollaborative: agent.isCollaborative,
      version: agent.version,
    });
  }
  return res.status(200).json(agent);

@@ -165,7 +169,9 @@ const updateAgentHandler = async (req, res) => {
    }

    let updatedAgent =
      Object.keys(updateData).length > 0 ? await updateAgent({ id }, updateData) : existingAgent;
      Object.keys(updateData).length > 0
        ? await updateAgent({ id }, updateData, { updatingUserId: req.user.id })
        : existingAgent;

    if (projectIds || removeProjectIds) {
      updatedAgent = await updateAgentProjects({

@@ -187,6 +193,14 @@ const updateAgentHandler = async (req, res) => {
    return res.json(updatedAgent);
  } catch (error) {
    logger.error('[/Agents/:id] Error updating Agent', error);

    if (error.statusCode === 409) {
      return res.status(409).json({
        error: error.message,
        details: error.details,
      });
    }

    res.status(500).json({ error: error.message });
  }
};

@@ -393,7 +407,11 @@ const uploadAgentAvatarHandler = async (req, res) => {
      },
    };

    promises.push(await updateAgent({ id: agent_id, author: req.user.id }, data));
    promises.push(
      await updateAgent({ id: agent_id, author: req.user.id }, data, {
        updatingUserId: req.user.id,
      }),
    );

    const resolved = await Promise.all(promises);
    res.status(201).json(resolved[0]);

@@ -411,6 +429,66 @@ const uploadAgentAvatarHandler = async (req, res) => {
  }
};

/**
 * Reverts an agent to a previous version from its version history.
 * @route POST /agents/:id/revert
 * @param {object} req - Express Request object
 * @param {object} req.params - Request parameters
 * @param {string} req.params.id - The ID of the agent to revert
 * @param {object} req.body - Request body
 * @param {number} req.body.version_index - The index of the version to revert to
 * @param {object} req.user - Authenticated user information
 * @param {string} req.user.id - User ID
 * @param {string} req.user.role - User role
 * @param {ServerResponse} res - Express Response object
 * @returns {Promise<Agent>} 200 - The updated agent after reverting to the specified version
 * @throws {Error} 400 - If version_index is missing
 * @throws {Error} 403 - If user doesn't have permission to modify the agent
 * @throws {Error} 404 - If agent not found
 * @throws {Error} 500 - If there's an internal server error during the reversion process
 */
const revertAgentVersionHandler = async (req, res) => {
  try {
    const { id } = req.params;
    const { version_index } = req.body;

    if (version_index === undefined) {
      return res.status(400).json({ error: 'version_index is required' });
    }

    const isAdmin = req.user.role === SystemRoles.ADMIN;
    const existingAgent = await getAgent({ id });

    if (!existingAgent) {
      return res.status(404).json({ error: 'Agent not found' });
    }

    const isAuthor = existingAgent.author.toString() === req.user.id;
    const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;

    if (!hasEditPermission) {
      return res.status(403).json({
        error: 'You do not have permission to modify this non-collaborative agent',
      });
    }

    const updatedAgent = await revertAgentVersion({ id }, version_index);

    if (updatedAgent.author) {
      updatedAgent.author = updatedAgent.author.toString();
    }

    if (updatedAgent.author !== req.user.id) {
      delete updatedAgent.author;
    }

    return res.json(updatedAgent);
  } catch (error) {
    logger.error('[/agents/:id/revert] Error reverting Agent version', error);
    res.status(500).json({ error: error.message });
  }
};

module.exports = {
  createAgent: createAgentHandler,
  getAgent: getAgentHandler,

@@ -419,4 +497,5 @@ module.exports = {
  deleteAgent: deleteAgentHandler,
  getListAgents: getListAgentsHandler,
  uploadAgentAvatar: uploadAgentAvatarHandler,
  revertAgentVersion: revertAgentVersionHandler,
};
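A minimal sketch of exercising the new revert handler end to end with supertest — assuming the agents v1 router is mounted under /api/agents and `token` is a valid user JWT (both assumptions, not shown in this diff):

// Illustrative only, not part of this diff.
const request = require('supertest');

async function revertToFirstVersion(app, agentId, token) {
  const response = await request(app)
    .post(`/api/agents/${agentId}/revert`) // mount path assumed
    .set('Authorization', `Bearer ${token}`)
    .send({ version_index: 0 });
  // 200 with the reverted agent on success; 400 if version_index is
  // missing, 403 without edit permission, 404 if the agent does not exist.
  return response.body;
}
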
@@ -326,8 +326,15 @@ const chatV1 = async (req, res) => {

    file_ids = files.map(({ file_id }) => file_id);
    if (file_ids.length || thread_file_ids.length) {
      userMessage.file_ids = file_ids;
      attachedFileIds = new Set([...file_ids, ...thread_file_ids]);
      if (endpoint === EModelEndpoint.azureAssistants) {
        userMessage.attachments = Array.from(attachedFileIds).map((file_id) => ({
          file_id,
          tools: [{ type: 'file_search' }],
        }));
      } else {
        userMessage.file_ids = Array.from(attachedFileIds);
      }
    }
  };

@@ -1,5 +1,5 @@
const cookies = require('cookie');
const { Issuer } = require('openid-client');
const { getOpenIdConfig } = require('~/strategies');
const { logoutUser } = require('~/server/services/AuthService');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');

@@ -10,20 +10,29 @@ const logoutController = async (req, res) => {
    const logout = await logoutUser(req, refreshToken);
    const { status, message } = logout;
    res.clearCookie('refreshToken');
    res.clearCookie('token_provider');
    const response = { message };
    if (
      req.user.openidId != null &&
      isEnabled(process.env.OPENID_USE_END_SESSION_ENDPOINT) &&
      process.env.OPENID_ISSUER
    ) {
      const issuer = await Issuer.discover(process.env.OPENID_ISSUER);
      const redirect = issuer.metadata.end_session_endpoint;
      if (!redirect) {
      const openIdConfig = getOpenIdConfig();
      if (!openIdConfig) {
        logger.warn(
          '[logoutController] end_session_endpoint not found in OpenID issuer metadata. Please verify that the issuer is correct.',
          '[logoutController] OpenID config not found. Please verify that the open id configuration and initialization are correct.',
        );
      } else {
        response.redirect = redirect;
        const endSessionEndpoint = openIdConfig
          ? openIdConfig.serverMetadata().end_session_endpoint
          : null;
        if (endSessionEndpoint) {
          response.redirect = endSessionEndpoint;
        } else {
          logger.warn(
            '[logoutController] end_session_endpoint not found in OpenID issuer metadata. Please verify that the issuer is correct.',
          );
        }
      }
    }
    return res.status(status).send(response);

@@ -6,6 +6,7 @@ const {
  Permissions,
  ToolCallTypes,
  PermissionTypes,
  loadWebSearchAuth,
} = require('librechat-data-provider');
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
const { processCodeOutput } = require('~/server/services/Files/Code/process');

@@ -24,6 +25,36 @@ const toolAccessPermType = {
  [Tools.execute_code]: PermissionTypes.RUN_CODE,
};

/**
 * Verifies web search authentication, ensuring each category has at least
 * one fully authenticated service.
 *
 * @param {ServerRequest} req - The request object
 * @param {ServerResponse} res - The response object
 * @returns {Promise<void>} A promise that resolves when the function has completed
 */
const verifyWebSearchAuth = async (req, res) => {
  try {
    const userId = req.user.id;
    /** @type {TCustomConfig['webSearch']} */
    const webSearchConfig = req.app.locals?.webSearch || {};
    const result = await loadWebSearchAuth({
      userId,
      loadAuthValues,
      webSearchConfig,
      throwError: false,
    });

    return res.status(200).json({
      authenticated: result.authenticated,
      authTypes: result.authTypes,
    });
  } catch (error) {
    console.error('Error in verifyWebSearchAuth:', error);
    return res.status(500).json({ message: error.message });
  }
};

/**
 * @param {ServerRequest} req - The request object, containing information about the HTTP request.
 * @param {ServerResponse} res - The response object, used to send back the desired HTTP response.

@@ -32,6 +63,9 @@ const toolAccessPermType = {
const verifyToolAuth = async (req, res) => {
  try {
    const { toolId } = req.params;
    if (toolId === Tools.web_search) {
      return await verifyWebSearchAuth(req, res);
    }
    const authFields = fieldsMap[toolId];
    if (!authFields) {
      res.status(404).json({ message: 'Tool not found' });

@@ -24,10 +24,13 @@ const routes = require('./routes');

const { PORT, HOST, ALLOW_SOCIAL_LOGIN, DISABLE_COMPRESSION, TRUST_PROXY } = process.env ?? {};

const port = Number(PORT) || 3080;
// Allow PORT=0 to be used for automatic free port assignment
const port = isNaN(Number(PORT)) ? 3080 : Number(PORT);
const host = HOST || 'localhost';
const trusted_proxy = Number(TRUST_PROXY) || 1; /* trust first proxy by default */

const app = express();

const startServer = async () => {
  if (typeof Bun !== 'undefined') {
    axios.defaults.headers.common['Accept-Encoding'] = 'gzip';

@@ -36,8 +39,9 @@ const startServer = async () => {
  logger.info('Connected to MongoDB');
  await indexSync();

  const app = express();
  app.disable('x-powered-by');
  app.set('trust proxy', trusted_proxy);

  await AppService(app);

  const indexPath = path.join(app.locals.paths.dist, 'index.html');

@@ -49,28 +53,29 @@ const startServer = async () => {
  app.use(noIndex);
  app.use(errorController);
  app.use(express.json({ limit: '3mb' }));
  app.use(mongoSanitize());
  app.use(express.urlencoded({ extended: true, limit: '3mb' }));
  app.use(staticCache(app.locals.paths.dist));
  app.use(staticCache(app.locals.paths.fonts));
  app.use(staticCache(app.locals.paths.assets));
  app.set('trust proxy', trusted_proxy);
  app.use(mongoSanitize());
  app.use(cors());
  app.use(cookieParser());

  if (!isEnabled(DISABLE_COMPRESSION)) {
    app.use(compression());
  } else {
    console.warn('Response compression has been disabled via DISABLE_COMPRESSION.');
  }

  // Serve static assets with aggressive caching
  app.use(staticCache(app.locals.paths.dist));
  app.use(staticCache(app.locals.paths.fonts));
  app.use(staticCache(app.locals.paths.assets));

  if (!ALLOW_SOCIAL_LOGIN) {
    console.warn(
      'Social logins are disabled. Set Environment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.',
    );
    console.warn('Social logins are disabled. Set ALLOW_SOCIAL_LOGIN=true to enable them.');
  }

  /* OAUTH */
  app.use(passport.initialize());
  passport.use(await jwtLogin());
  passport.use(jwtLogin());
  passport.use(passportLogin());

  /* LDAP Auth */

@@ -79,7 +84,7 @@ const startServer = async () => {
  }

  if (isEnabled(ALLOW_SOCIAL_LOGIN)) {
    configureSocialLogins(app);
    await configureSocialLogins(app);
  }

  app.use('/oauth', routes.oauth);

@@ -128,7 +133,7 @@ const startServer = async () => {
  });

  app.listen(port, host, () => {
    if (host == '0.0.0.0') {
    if (host === '0.0.0.0') {
      logger.info(
        `Server listening on all interfaces at port ${port}. Use http://localhost:${port} to access it`,
      );

@@ -176,3 +181,6 @@ process.on('uncaughtException', (err) => {

  process.exit(1);
});

// export app for easier testing purposes
module.exports = app;

82 api/server/index.spec.js Normal file
@@ -0,0 +1,82 @@
const fs = require('fs');
const path = require('path');
const request = require('supertest');
const { MongoMemoryServer } = require('mongodb-memory-server');
const mongoose = require('mongoose');

jest.mock('~/server/services/Config/loadCustomConfig', () => {
  return jest.fn(() => Promise.resolve({}));
});

describe('Server Configuration', () => {
  // Increase the default timeout to allow for Mongo cleanup
  jest.setTimeout(30_000);

  let mongoServer;
  let app;

  /** Mocked fs.readFileSync for index.html */
  const originalReadFileSync = fs.readFileSync;
  beforeAll(() => {
    fs.readFileSync = function (filepath, options) {
      if (filepath.includes('index.html')) {
        return '<!DOCTYPE html><html><head><title>LibreChat</title></head><body><div id="root"></div></body></html>';
      }
      return originalReadFileSync(filepath, options);
    };
  });

  afterAll(() => {
    // Restore original fs.readFileSync
    fs.readFileSync = originalReadFileSync;
  });

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    process.env.MONGO_URI = mongoServer.getUri();
    process.env.PORT = '0'; // Use a random available port
    app = require('~/server');

    // Wait for the app to be healthy
    await healthCheckPoll(app);
  });

  afterAll(async () => {
    await mongoServer.stop();
    await mongoose.disconnect();
  });

  it('should return OK for /health', async () => {
    const response = await request(app).get('/health');
    expect(response.status).toBe(200);
    expect(response.text).toBe('OK');
  });

  it('should not cache index page', async () => {
    const response = await request(app).get('/');
    expect(response.status).toBe(200);
    expect(response.headers['cache-control']).toBe('no-cache, no-store, must-revalidate');
    expect(response.headers['pragma']).toBe('no-cache');
    expect(response.headers['expires']).toBe('0');
  });
});

// Polls the /health endpoint every 30ms for up to 10 seconds to wait for the server to start completely
async function healthCheckPoll(app, retries = 0) {
  const maxRetries = Math.floor(10000 / 30); // 10 seconds / 30ms
  try {
    const response = await request(app).get('/health');
    if (response.status === 200) {
      return; // App is healthy
    }
  } catch (error) {
    // Ignore connection errors during polling
  }

  if (retries < maxRetries) {
    await new Promise((resolve) => setTimeout(resolve, 30));
    await healthCheckPoll(app, retries + 1);
  } else {
    throw new Error('App did not become healthy within 10 seconds.');
  }
}

@@ -1,9 +1,13 @@
const cookies = require('cookie');
const { isEnabled } = require('~/server/utils');
const passport = require('passport');

// This middleware does not require authentication,
// but if the user is authenticated, it will set the user object.
const optionalJwtAuth = (req, res, next) => {
  passport.authenticate('jwt', { session: false }, (err, user) => {
  const cookieHeader = req.headers.cookie;
  const tokenProvider = cookieHeader ? cookies.parse(cookieHeader).token_provider : null;
  const callback = (err, user) => {
    if (err) {
      return next(err);
    }

@@ -11,7 +15,11 @@ const optionalJwtAuth = (req, res, next) => {
      req.user = user;
    }
    next();
  })(req, res, next);
};
  };
  if (tokenProvider === 'openid' && isEnabled(process.env.OPENID_REUSE_TOKENS)) {
    return passport.authenticate('openidJwt', { session: false }, callback)(req, res, next);
  }
  passport.authenticate('jwt', { session: false }, callback)(req, res, next);
};

module.exports = optionalJwtAuth;

@@ -1,5 +1,23 @@
const passport = require('passport');
const cookies = require('cookie');
const { isEnabled } = require('~/server/utils');

const requireJwtAuth = passport.authenticate('jwt', { session: false });
/**
 * Custom Middleware to handle JWT authentication, with support for OpenID token reuse
 * Switches between JWT and OpenID authentication based on cookies and environment settings
 */
const requireJwtAuth = (req, res, next) => {
  // Check if token provider is specified in cookies
  const cookieHeader = req.headers.cookie;
  const tokenProvider = cookieHeader ? cookies.parse(cookieHeader).token_provider : null;

  // Use OpenID authentication if token provider is OpenID and OPENID_REUSE_TOKENS is enabled
  if (tokenProvider === 'openid' && isEnabled(process.env.OPENID_REUSE_TOKENS)) {
    return passport.authenticate('openidJwt', { session: false })(req, res, next);
  }

  // Default to standard JWT authentication
  return passport.authenticate('jwt', { session: false })(req, res, next);
};

module.exports = requireJwtAuth;
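A minimal usage sketch of the reworked middleware — with OPENID_REUSE_TOKENS enabled and a token_provider=openid cookie present, the 'openidJwt' strategy is used; otherwise the standard 'jwt' strategy applies (the require path below is assumed):

// Illustrative only, not part of this diff.
const express = require('express');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth'); // path assumed

const router = express.Router();
router.get('/me', requireJwtAuth, (req, res) => {
  // req.user is populated by whichever passport strategy authenticated
  res.status(200).json({ id: req.user.id });
});
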
@@ -1,11 +1,11 @@
jest.mock('~/cache/getLogStores');
const request = require('supertest');
const express = require('express');
const routes = require('../');
const configRoute = require('../config');
// file deepcode ignore UseCsurfForExpress/test: test
const app = express();
app.disable('x-powered-by');
app.use('/api/config', routes.config);
app.use('/api/config', configRoute);

afterEach(() => {
  delete process.env.APP_TITLE;

@@ -24,6 +24,12 @@ afterEach(() => {
  delete process.env.GITHUB_CLIENT_SECRET;
  delete process.env.DISCORD_CLIENT_ID;
  delete process.env.DISCORD_CLIENT_SECRET;
  delete process.env.SAML_ENTRY_POINT;
  delete process.env.SAML_ISSUER;
  delete process.env.SAML_CERT;
  delete process.env.SAML_SESSION_SECRET;
  delete process.env.SAML_BUTTON_LABEL;
  delete process.env.SAML_IMAGE_URL;
  delete process.env.DOMAIN_SERVER;
  delete process.env.ALLOW_REGISTRATION;
  delete process.env.ALLOW_SOCIAL_LOGIN;

@@ -55,6 +61,12 @@ describe.skip('GET /', () => {
    process.env.GITHUB_CLIENT_SECRET = 'Test Github client Secret';
    process.env.DISCORD_CLIENT_ID = 'Test Discord client Id';
    process.env.DISCORD_CLIENT_SECRET = 'Test Discord client Secret';
    process.env.SAML_ENTRY_POINT = 'http://test-server.com';
    process.env.SAML_ISSUER = 'Test SAML Issuer';
    process.env.SAML_CERT = 'saml.pem';
    process.env.SAML_SESSION_SECRET = 'Test Secret';
    process.env.SAML_BUTTON_LABEL = 'Test SAML';
    process.env.SAML_IMAGE_URL = 'http://test-server.com';
    process.env.DOMAIN_SERVER = 'http://test-server.com';
    process.env.ALLOW_REGISTRATION = 'true';
    process.env.ALLOW_SOCIAL_LOGIN = 'true';

@@ -70,7 +82,7 @@ describe.skip('GET /', () => {
    expect(response.statusCode).toBe(200);
    expect(response.body).toEqual({
      appTitle: 'Test Title',
      socialLogins: ['google', 'facebook', 'openid', 'github', 'discord'],
      socialLogins: ['google', 'facebook', 'openid', 'github', 'discord', 'saml'],
      discordLoginEnabled: true,
      facebookLoginEnabled: true,
      githubLoginEnabled: true,

@@ -78,6 +90,9 @@ describe.skip('GET /', () => {
      openidLoginEnabled: true,
      openidLabel: 'Test OpenID',
      openidImageUrl: 'http://test-server.com',
      samlLoginEnabled: true,
      samlLabel: 'Test SAML',
      samlImageUrl: 'http://test-server.com',
      ldap: {
        enabled: true,
      },

@@ -107,7 +107,15 @@ router.post('/:agent_id', async (req, res) => {
      .filter((tool) => !(tool && (tool.includes(domain) || tool.includes(action_id))))
      .concat(functions.map((tool) => `${tool.function.name}${actionDelimiter}${domain}`));

    const updatedAgent = await updateAgent(agentQuery, { tools, actions });
    // Force version update since actions are changing
    const updatedAgent = await updateAgent(
      agentQuery,
      { tools, actions },
      {
        updatingUserId: req.user.id,
        forceVersion: true,
      },
    );

    // Only update user field for new actions
    const actionUpdateData = { metadata, agent_id };

@@ -172,7 +180,12 @@ router.delete('/:agent_id/:action_id', async (req, res) => {

    const updatedTools = tools.filter((tool) => !(tool && tool.includes(domain)));

    await updateAgent(agentQuery, { tools: updatedTools, actions: updatedActions });
    // Force version update since actions are being removed
    await updateAgent(
      agentQuery,
      { tools: updatedTools, actions: updatedActions },
      { updatingUserId: req.user.id, forceVersion: true },
    );
    // If admin, can delete any action, otherwise only user's actions
    const actionQuery = admin ? { action_id } : { action_id, user: req.user.id };
    await deleteAction(actionQuery);

@@ -78,6 +78,15 @@ router.post('/:id/duplicate', checkAgentCreate, v1.duplicateAgent);
 */
router.delete('/:id', checkAgentCreate, v1.deleteAgent);

/**
 * Reverts an agent to a previous version.
 * @route POST /agents/:id/revert
 * @param {string} req.params.id - Agent identifier.
 * @param {number} req.body.version_index - Index of the version to revert to.
 * @returns {Agent} 200 - success response - application/json
 */
router.post('/:id/revert', checkGlobalAgentShare, v1.revertAgentVersion);

/**
 * Returns a list of agents.
 * @route GET /agents

@@ -37,6 +37,18 @@ router.get('/', async function (req, res) {
   const ldap = getLdapConfig();

   try {
+    const isOpenIdEnabled =
+      !!process.env.OPENID_CLIENT_ID &&
+      !!process.env.OPENID_CLIENT_SECRET &&
+      !!process.env.OPENID_ISSUER &&
+      !!process.env.OPENID_SESSION_SECRET;
+
+    const isSamlEnabled =
+      !!process.env.SAML_ENTRY_POINT &&
+      !!process.env.SAML_ISSUER &&
+      !!process.env.SAML_CERT &&
+      !!process.env.SAML_SESSION_SECRET;
+
     /** @type {TStartupConfig} */
     const payload = {
       appTitle: process.env.APP_TITLE || 'LibreChat',

@@ -51,14 +63,13 @@ router.get('/', async function (req, res) {
         !!process.env.APPLE_TEAM_ID &&
         !!process.env.APPLE_KEY_ID &&
         !!process.env.APPLE_PRIVATE_KEY_PATH,
-      openidLoginEnabled:
-        !!process.env.OPENID_CLIENT_ID &&
-        !!process.env.OPENID_CLIENT_SECRET &&
-        !!process.env.OPENID_ISSUER &&
-        !!process.env.OPENID_SESSION_SECRET,
+      openidLoginEnabled: isOpenIdEnabled,
       openidLabel: process.env.OPENID_BUTTON_LABEL || 'Continue with OpenID',
       openidImageUrl: process.env.OPENID_IMAGE_URL,
       openidAutoRedirect: isEnabled(process.env.OPENID_AUTO_REDIRECT),
+      samlLoginEnabled: !isOpenIdEnabled && isSamlEnabled,
+      samlLabel: process.env.SAML_BUTTON_LABEL,
+      samlImageUrl: process.env.SAML_IMAGE_URL,
       serverDomain: process.env.DOMAIN_SERVER || 'http://localhost:3080',
       emailLoginEnabled,
       registrationEnabled: !ldap?.enabled && isEnabled(process.env.ALLOW_REGISTRATION),

@@ -85,6 +96,26 @@ router.get('/', async function (req, res) {
       bundlerURL: process.env.SANDPACK_BUNDLER_URL,
       staticBundlerURL: process.env.SANDPACK_STATIC_BUNDLER_URL,
     };
+    /** @type {TCustomConfig['webSearch']} */
+    const webSearchConfig = req.app.locals.webSearch;
+    if (
+      webSearchConfig != null &&
+      (webSearchConfig.searchProvider ||
+        webSearchConfig.scraperType ||
+        webSearchConfig.rerankerType)
+    ) {
+      payload.webSearch = {};
+    }
+
+    if (webSearchConfig?.searchProvider) {
+      payload.webSearch.searchProvider = webSearchConfig.searchProvider;
+    }
+    if (webSearchConfig?.scraperType) {
+      payload.webSearch.scraperType = webSearchConfig.scraperType;
+    }
+    if (webSearchConfig?.rerankerType) {
+      payload.webSearch.rerankerType = webSearchConfig.rerankerType;
+    }
+
     if (ldap) {
       payload.ldap = ldap;

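A note on the gating above: SAML is only surfaced when OpenID is not fully configured, so the two providers never both appear on the login page. A minimal sketch of the resulting flag combinations (the boolean cases are hypothetical env states, not code from this commit):

// Hypothetical env states; the flags mirror the checks in the route above.
const cases = [
  { openid: true, saml: true },   // -> openidLoginEnabled: true,  samlLoginEnabled: false
  { openid: false, saml: true },  // -> openidLoginEnabled: false, samlLoginEnabled: true
  { openid: false, saml: false }, // -> both disabled
];
for (const { openid, saml } of cases) {
  console.log({ openidLoginEnabled: openid, samlLoginEnabled: !openid && saml });
}
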
@@ -74,7 +74,7 @@ router.post('/gen_title', async (req, res) => {
     res.status(200).json({ title });
   } else {
     res.status(404).json({
-      message: 'Title not found or method not implemented for the conversation\'s endpoint',
+      message: "Title not found or method not implemented for the conversation's endpoint",
     });
   }
 });

@@ -121,6 +121,14 @@ router.delete('/', async (req, res) => {
     await processDeleteRequest({ req, files: assistantFiles });
     res.status(200).json({ message: 'File associations removed successfully from assistant' });
     return;
+  } else if (
+    req.body.assistant_id &&
+    req.body.files?.[0]?.filepath === EModelEndpoint.azureAssistants
+  ) {
+    await processDeleteRequest({ req, files: req.body.files });
+    return res
+      .status(200)
+      .json({ message: 'File associations removed successfully from Azure Assistant' });
   }

   await processDeleteRequest({ req, files: dbFiles });

@@ -275,6 +283,10 @@ router.post('/', async (req, res) => {
       message += ': ' + error.message;
     }

+    if (error.message?.includes('Invalid file format')) {
+      message = error.message;
+    }
+
     // TODO: delete remote file if it exists
     try {
       await fs.unlink(req.file.path);

@@ -1,6 +1,7 @@
 // file deepcode ignore NoRateLimitingForLogin: Rate limiting is handled by the `loginLimiter` middleware
 const express = require('express');
 const passport = require('passport');
+const { randomState } = require('openid-client');
 const {
   checkBan,
   logHeaders,

@@ -8,7 +9,8 @@ const {
   setBalanceConfig,
   checkDomainAllowed,
 } = require('~/server/middleware');
-const { setAuthTokens } = require('~/server/services/AuthService');
+const { setAuthTokens, setOpenIDAuthTokens } = require('~/server/services/AuthService');
+const { isEnabled } = require('~/server/utils');
 const { logger } = require('~/config');

 const router = express.Router();

@@ -28,7 +30,15 @@ const oauthHandler = async (req, res) => {
     if (req.banned) {
       return;
     }
-    await setAuthTokens(req.user._id, res);
+    if (
+      req.user &&
+      req.user.provider == 'openid' &&
+      isEnabled(process.env.OPENID_REUSE_TOKENS) === true
+    ) {
+      setOpenIDAuthTokens(req.user.tokenset, res);
+    } else {
+      await setAuthTokens(req.user._id, res);
+    }
     res.redirect(domains.client);
   } catch (err) {
     logger.error('Error in setting authentication tokens:', err);

@@ -94,12 +104,12 @@ router.get(
 /**
  * OpenID Routes
  */
-router.get(
-  '/openid',
-  passport.authenticate('openid', {
+router.get('/openid', (req, res, next) => {
+  return passport.authenticate('openid', {
     session: false,
-  }),
-);
+    state: randomState(),
+  })(req, res, next);
+});

 router.get(
   '/openid/callback',

@@ -179,4 +189,24 @@ router.post(
   oauthHandler,
 );

+/**
+ * SAML Routes
+ */
+router.get(
+  '/saml',
+  passport.authenticate('saml', {
+    session: false,
+  }),
+);
+
+router.post(
+  '/saml/callback',
+  passport.authenticate('saml', {
+    failureRedirect: `${domains.client}/oauth/error`,
+    failureMessage: true,
+    session: false,
+  }),
+  oauthHandler,
+);
+
 module.exports = router;

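The handler above now picks a cookie strategy per provider. A minimal sketch of the same decision in isolation, assuming `isEnabled` treats 'true'-style strings as truthy (its exact parsing lives in ~/server/utils and is not shown in this diff; the helper name below is hypothetical):

// Hypothetical helper mirroring the branch in oauthHandler above.
async function issueTokens(user, res, { setAuthTokens, setOpenIDAuthTokens }) {
  const reuse = process.env.OPENID_REUSE_TOKENS === 'true'; // stand-in for isEnabled()
  if (user?.provider === 'openid' && reuse) {
    return setOpenIDAuthTokens(user.tokenset, res); // provider-issued tokens
  }
  return await setAuthTokens(user._id, res); // LibreChat-issued session tokens
}
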
@@ -207,7 +207,7 @@ async function createActionTool({
       state: stateToken,
       userId: userId,
       client_url: metadata.auth.client_url,
-      redirect_uri: `${process.env.DOMAIN_CLIENT}/api/actions/${action_id}/oauth/callback`,
+      redirect_uri: `${process.env.DOMAIN_SERVER}/api/actions/${action_id}/oauth/callback`,
       /** Encrypted values */
       encrypted_oauth_client_id: encrypted.oauth_client_id,
       encrypted_oauth_client_secret: encrypted.oauth_client_secret,

@@ -25,6 +25,7 @@ jest.mock('./start/checks', () => ({
   checkHealth: jest.fn(),
   checkConfig: jest.fn(),
   checkAzureVariables: jest.fn(),
+  checkWebSearchConfig: jest.fn(),
 }));

 const AppService = require('./AppService');

@@ -1,11 +1,18 @@
 const {
   FileSources,
-  EModelEndpoint,
   loadOCRConfig,
   processMCPEnv,
+  EModelEndpoint,
   getConfigDefaults,
+  loadWebSearchConfig,
 } = require('librechat-data-provider');
-const { checkVariables, checkHealth, checkConfig, checkAzureVariables } = require('./start/checks');
+const {
+  checkHealth,
+  checkConfig,
+  checkVariables,
+  checkAzureVariables,
+  checkWebSearchConfig,
+} = require('./start/checks');
 const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
 const { initializeAzureBlobService } = require('./Files/Azure/initialize');
 const { initializeFirebase } = require('./Files/Firebase/initialize');

@@ -35,6 +42,8 @@ const AppService = async (app) => {
   const configDefaults = getConfigDefaults();

   const ocr = loadOCRConfig(config.ocr);
+  const webSearch = loadWebSearchConfig(config.webSearch);
+  checkWebSearchConfig(webSearch);
   const filteredTools = config.filteredTools;
   const includedTools = config.includedTools;
   const fileStrategy = config.fileStrategy ?? configDefaults.fileStrategy;

@@ -79,6 +88,7 @@ const AppService = async (app) => {
   const defaultLocals = {
     ocr,
     paths,
+    webSearch,
     fileStrategy,
     socialLogins,
     filteredTools,

@@ -141,6 +141,14 @@ describe('AppService', () => {
       balance: { enabled: true },
       filteredTools: undefined,
       includedTools: undefined,
+      webSearch: {
+        cohereApiKey: '${COHERE_API_KEY}',
+        firecrawlApiKey: '${FIRECRAWL_API_KEY}',
+        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
+        jinaApiKey: '${JINA_API_KEY}',
+        safeSearch: 1,
+        serperApiKey: '${SERPER_API_KEY}',
+      },
     });
   });

@@ -537,7 +545,7 @@ describe('AppService updating app.locals and issuing warnings', () => {
     const { logger } = require('~/config');
     expect(logger.warn).toHaveBeenCalledWith(
       expect.stringContaining(
-        'The \'assistants\' endpoint has both \'supportedIds\' and \'excludedIds\' defined.',
+        "The 'assistants' endpoint has both 'supportedIds' and 'excludedIds' defined.",
       ),
     );
   });

@@ -559,7 +567,7 @@ describe('AppService updating app.locals and issuing warnings', () => {
     const { logger } = require('~/config');
     expect(logger.warn).toHaveBeenCalledWith(
       expect.stringContaining(
-        'The \'assistants\' endpoint has both \'privateAssistants\' and \'supportedIds\' or \'excludedIds\' defined.',
+        "The 'assistants' endpoint has both 'privateAssistants' and 'supportedIds' or 'excludedIds' defined.",
       ),
     );
   });

@@ -377,13 +377,62 @@ const setAuthTokens = async (userId, res, sessionId = null) => {
       secure: isProduction,
       sameSite: 'strict',
     });
+
+    res.cookie('token_provider', 'librechat', {
+      expires: new Date(refreshTokenExpires),
+      httpOnly: true,
+      secure: isProduction,
+      sameSite: 'strict',
+    });
     return token;
   } catch (error) {
     logger.error('[setAuthTokens] Error in setting authentication tokens:', error);
     throw error;
   }
 };
+/**
+ * @function setOpenIDAuthTokens
+ * Set OpenID Authentication Tokens
+ * //type tokenset from openid-client
+ * @param {import('openid-client').TokenEndpointResponse & import('openid-client').TokenEndpointResponseHelpers} tokenset
+ * - The tokenset object containing access and refresh tokens
+ * @param {Object} res - response object
+ * @returns {String} - access token
+ */
+const setOpenIDAuthTokens = (tokenset, res) => {
+  try {
+    if (!tokenset) {
+      logger.error('[setOpenIDAuthTokens] No tokenset found in request');
+      return;
+    }
+    const { REFRESH_TOKEN_EXPIRY } = process.env ?? {};
+    const expiryInMilliseconds = eval(REFRESH_TOKEN_EXPIRY) ?? 1000 * 60 * 60 * 24 * 7; // 7 days default
+    const expirationDate = new Date(Date.now() + expiryInMilliseconds);
+    if (tokenset == null) {
+      logger.error('[setOpenIDAuthTokens] No tokenset found in request');
+      return;
+    }
+    if (!tokenset.access_token || !tokenset.refresh_token) {
+      logger.error('[setOpenIDAuthTokens] No access or refresh token found in tokenset');
+      return;
+    }
+    res.cookie('refreshToken', tokenset.refresh_token, {
+      expires: expirationDate,
+      httpOnly: true,
+      secure: isProduction,
+      sameSite: 'strict',
+    });
+    res.cookie('token_provider', 'openid', {
+      expires: expirationDate,
+      httpOnly: true,
+      secure: isProduction,
+      sameSite: 'strict',
+    });
+    return tokenset.access_token;
+  } catch (error) {
+    logger.error('[setOpenIDAuthTokens] Error in setting authentication tokens:', error);
+    throw error;
+  }
+};

 /**
  * Resend Verification Email

@@ -452,4 +501,5 @@ module.exports = {
   resetPassword,
   requestPasswordReset,
   resendVerificationEmail,
+  setOpenIDAuthTokens,
 };

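Note that `REFRESH_TOKEN_EXPIRY` above is passed through `eval`, so the variable may hold a JavaScript expression rather than a plain number. A minimal illustration of how a value would resolve (the sample .env entry is an assumption, not a documented default):

// e.g. .env: REFRESH_TOKEN_EXPIRY=1000 * 60 * 60 * 24 * 7
const expiry = eval('1000 * 60 * 60 * 24 * 7'); // 604800000 ms = 7 days
console.log(new Date(Date.now() + expiry)); // cookie expiration, as computed above
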
@@ -10,17 +10,7 @@ const getLogStores = require('~/cache/getLogStores');
  * */
 async function getCustomConfig() {
   const cache = getLogStores(CacheKeys.CONFIG_STORE);
-  let customConfig = await cache.get(CacheKeys.CUSTOM_CONFIG);
-
-  if (!customConfig) {
-    customConfig = await loadCustomConfig();
-  }
-
-  if (!customConfig) {
-    return null;
-  }
-
-  return customConfig;
+  return (await cache.get(CacheKeys.CUSTOM_CONFIG)) || (await loadCustomConfig());
 }

 /**

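The simplified return above relies on `||` short-circuiting: a cached config wins, otherwise the YAML is loaded, and a missing config falls through as a falsy value. A trivial sketch of the same precedence with hypothetical stand-ins:

// Hypothetical stand-ins for the cache and loader used above.
const cached = null; // cache miss
const loaded = { version: '1.0' };
const customConfig = cached || loaded; // -> { version: '1.0' }
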
@@ -29,7 +29,14 @@ async function loadConfigEndpoints(req) {

   for (let i = 0; i < customEndpoints.length; i++) {
     const endpoint = customEndpoints[i];
-    const { baseURL, apiKey, name: configName, iconURL, modelDisplayLabel } = endpoint;
+    const {
+      baseURL,
+      apiKey,
+      name: configName,
+      iconURL,
+      modelDisplayLabel,
+      customParams,
+    } = endpoint;
     const name = normalizeEndpointName(configName);

     const resolvedApiKey = extractEnvVariable(apiKey);

@@ -41,6 +48,7 @@ async function loadConfigEndpoints(req) {
         userProvideURL: isUserProvided(resolvedBaseURL),
         modelDisplayLabel,
         iconURL,
+        customParams,
       };
     }
   }

@@ -1,10 +1,18 @@
 const path = require('path');
-const { CacheKeys, configSchema, EImageOutputType } = require('librechat-data-provider');
+const {
+  CacheKeys,
+  configSchema,
+  EImageOutputType,
+  validateSettingDefinitions,
+  agentParamSettings,
+  paramSettings,
+} = require('librechat-data-provider');
 const getLogStores = require('~/cache/getLogStores');
 const loadYaml = require('~/utils/loadYaml');
 const { logger } = require('~/config');
 const axios = require('axios');
 const yaml = require('js-yaml');
+const keyBy = require('lodash/keyBy');

 const projectRoot = path.resolve(__dirname, '..', '..', '..', '..');
 const defaultConfigPath = path.resolve(projectRoot, 'librechat.yaml');

@@ -105,6 +113,10 @@ https://www.librechat.ai/docs/configuration/stt_tts`);
     logger.debug('Custom config:', customConfig);
   }

+  (customConfig.endpoints?.custom ?? [])
+    .filter((endpoint) => endpoint.customParams)
+    .forEach((endpoint) => parseCustomParams(endpoint.name, endpoint.customParams));
+
   if (customConfig.cache) {
     const cache = getLogStores(CacheKeys.CONFIG_STORE);
     await cache.set(CacheKeys.CUSTOM_CONFIG, customConfig);

@@ -117,4 +129,52 @@ https://www.librechat.ai/docs/configuration/stt_tts`);
   return customConfig;
 }

+// Validate and fill out missing values for custom parameters
+function parseCustomParams(endpointName, customParams) {
+  const paramEndpoint = customParams.defaultParamsEndpoint;
+  customParams.paramDefinitions = customParams.paramDefinitions || [];
+
+  // Checks if `defaultParamsEndpoint` is a key in `paramSettings`.
+  const validEndpoints = new Set([
+    ...Object.keys(paramSettings),
+    ...Object.keys(agentParamSettings),
+  ]);
+  if (!validEndpoints.has(paramEndpoint)) {
+    throw new Error(
+      `defaultParamsEndpoint of "${endpointName}" endpoint is invalid. ` +
+        `Valid options are ${Array.from(validEndpoints).join(', ')}`,
+    );
+  }
+
+  // creates default param maps
+  const regularParams = paramSettings[paramEndpoint] ?? [];
+  const agentParams = agentParamSettings[paramEndpoint] ?? [];
+  const defaultParams = regularParams.concat(agentParams);
+  const defaultParamsMap = keyBy(defaultParams, 'key');
+
+  // TODO: Remove this check once we support new parameters not part of default parameters.
+  // Checks if every key in `paramDefinitions` is valid.
+  const validKeys = new Set(Object.keys(defaultParamsMap));
+  const paramKeys = customParams.paramDefinitions.map((param) => param.key);
+  if (paramKeys.some((key) => !validKeys.has(key))) {
+    throw new Error(
+      `paramDefinitions of "${endpointName}" endpoint contains invalid key(s). ` +
+        `Valid parameter keys are ${Array.from(validKeys).join(', ')}`,
+    );
+  }
+
+  // Fill out missing values for custom param definitions
+  customParams.paramDefinitions = customParams.paramDefinitions.map((param) => {
+    return { ...defaultParamsMap[param.key], ...param, optionType: 'custom' };
+  });
+
+  try {
+    validateSettingDefinitions(customParams.paramDefinitions);
+  } catch (e) {
+    throw new Error(
+      `Custom parameter definitions for "${endpointName}" endpoint is malformed: ${e.message}`,
+    );
+  }
+}
+
 module.exports = loadCustomConfig;

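To see what `parseCustomParams` produces, consider a custom endpoint that overrides a single default. The shape below follows the test expectations later in this commit; the endpoint name and values are illustrative:

// Input (as it would appear under endpoints.custom[n].customParams):
const customParams = {
  defaultParamsEndpoint: 'google',
  paramDefinitions: [{ key: 'temperature', default: 0.7 }],
};
// After parseCustomParams, each definition is merged over its default and
// forced to optionType 'custom', yielding something like:
// { key: 'temperature', type: 'number', component: 'slider',
//   default: 0.7, range: { min: 0, max: 2, step: 0.01 }, optionType: 'custom' }
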
@@ -1,6 +1,34 @@
 jest.mock('axios');
 jest.mock('~/cache/getLogStores');
 jest.mock('~/utils/loadYaml');
+jest.mock('librechat-data-provider', () => {
+  const actual = jest.requireActual('librechat-data-provider');
+  return {
+    ...actual,
+    paramSettings: { foo: {}, bar: {}, custom: {} },
+    agentParamSettings: {
+      custom: [],
+      google: [
+        {
+          key: 'pressure',
+          type: 'string',
+          component: 'input',
+        },
+        {
+          key: 'temperature',
+          type: 'number',
+          component: 'slider',
+          default: 0.5,
+          range: {
+            min: 0,
+            max: 2,
+            step: 0.01,
+          },
+        },
+      ],
+    },
+  };
+});

 const axios = require('axios');
 const loadCustomConfig = require('./loadCustomConfig');

@@ -150,4 +178,126 @@ describe('loadCustomConfig', () => {
     expect(logger.info).toHaveBeenCalledWith(JSON.stringify(mockConfig, null, 2));
     expect(logger.debug).toHaveBeenCalledWith('Custom config:', mockConfig);
   });
+
+  describe('parseCustomParams', () => {
+    const mockConfig = {
+      version: '1.0',
+      cache: false,
+      endpoints: {
+        custom: [
+          {
+            name: 'Google',
+            apiKey: 'user_provided',
+            customParams: {},
+          },
+        ],
+      },
+    };
+
+    async function loadCustomParams(customParams) {
+      mockConfig.endpoints.custom[0].customParams = customParams;
+      loadYaml.mockReturnValue(mockConfig);
+      return await loadCustomConfig();
+    }
+
+    beforeEach(() => {
+      jest.resetAllMocks();
+      process.env.CONFIG_PATH = 'validConfig.yaml';
+    });
+
+    it('returns no error when customParams is undefined', async () => {
+      const result = await loadCustomParams(undefined);
+      expect(result).toEqual(mockConfig);
+    });
+
+    it('returns no error when customParams is valid', async () => {
+      const result = await loadCustomParams({
+        defaultParamsEndpoint: 'google',
+        paramDefinitions: [
+          {
+            key: 'temperature',
+            default: 0.5,
+          },
+        ],
+      });
+      expect(result).toEqual(mockConfig);
+    });
+
+    it('throws an error when paramDefinitions contain unsupported keys', async () => {
+      const malformedCustomParams = {
+        defaultParamsEndpoint: 'google',
+        paramDefinitions: [
+          { key: 'temperature', default: 0.5 },
+          { key: 'unsupportedKey', range: 0.5 },
+        ],
+      };
+      await expect(loadCustomParams(malformedCustomParams)).rejects.toThrow(
+        'paramDefinitions of "Google" endpoint contains invalid key(s). Valid parameter keys are pressure, temperature',
+      );
+    });
+
+    it('throws an error when paramDefinitions is malformed', async () => {
+      const malformedCustomParams = {
+        defaultParamsEndpoint: 'google',
+        paramDefinitions: [
+          {
+            key: 'temperature',
+            type: 'noomba',
+            component: 'inpoot',
+            optionType: 'custom',
+          },
+        ],
+      };
+      await expect(loadCustomParams(malformedCustomParams)).rejects.toThrow(
+        /Custom parameter definitions for "Google" endpoint is malformed:/,
+      );
+    });
+
+    it('throws an error when defaultParamsEndpoint is not provided', async () => {
+      const malformedCustomParams = { defaultParamsEndpoint: undefined };
+      await expect(loadCustomParams(malformedCustomParams)).rejects.toThrow(
+        'defaultParamsEndpoint of "Google" endpoint is invalid. Valid options are foo, bar, custom, google',
+      );
+    });
+
+    it('fills the paramDefinitions with missing values', async () => {
+      const customParams = {
+        defaultParamsEndpoint: 'google',
+        paramDefinitions: [
+          { key: 'temperature', default: 0.7, range: { min: 0.1, max: 0.9, step: 0.1 } },
+          { key: 'pressure', component: 'textarea' },
+        ],
+      };
+
+      const parsedConfig = await loadCustomParams(customParams);
+      const paramDefinitions = parsedConfig.endpoints.custom[0].customParams.paramDefinitions;
+      expect(paramDefinitions).toEqual([
+        {
+          columnSpan: 1,
+          component: 'slider',
+          default: 0.7, // overridden
+          includeInput: true,
+          key: 'temperature',
+          label: 'temperature',
+          optionType: 'custom',
+          range: {
+            // overridden
+            max: 0.9,
+            min: 0.1,
+            step: 0.1,
+          },
+          type: 'number',
+        },
+        {
+          columnSpan: 1,
+          component: 'textarea', // overridden
+          key: 'pressure',
+          label: 'pressure',
+          optionType: 'custom',
+          placeholder: '',
+          type: 'string',
+        },
+      ]);
+    });
+  });
 });

@@ -15,20 +15,14 @@ function checkPromptCacheSupport(modelName) {
     return false;
   }

-  if (
-    modelMatch === 'claude-3-7-sonnet' ||
-    modelMatch === 'claude-3-5-sonnet' ||
-    modelMatch === 'claude-3-5-haiku' ||
-    modelMatch === 'claude-3-haiku' ||
-    modelMatch === 'claude-3-opus' ||
-    modelMatch === 'claude-3.7-sonnet' ||
-    modelMatch === 'claude-3.5-sonnet' ||
-    modelMatch === 'claude-3.5-haiku'
-  ) {
-    return true;
-  }
-
-  return false;
+  return (
+    /claude-3[-.]7/.test(modelMatch) ||
+    /claude-3[-.]5-(?:sonnet|haiku)/.test(modelMatch) ||
+    /claude-3-(?:sonnet|haiku|opus)?/.test(modelMatch) ||
+    /claude-(?:sonnet|opus|haiku)-[4-9]/.test(modelMatch) ||
+    /claude-[4-9]-(?:sonnet|opus|haiku)?/.test(modelMatch) ||
+    /claude-4(?:-(?:sonnet|opus|haiku))?/.test(modelMatch)
+  );
 }

 /**

@@ -51,6 +45,14 @@ function getClaudeHeaders(model, supportsCacheControl) {
       'anthropic-beta':
         'token-efficient-tools-2025-02-19,output-128k-2025-02-19,prompt-caching-2024-07-31',
     };
+  } else if (
+    /claude-(?:sonnet|opus|haiku)-[4-9]/.test(model) ||
+    /claude-[4-9]-(?:sonnet|opus|haiku)?/.test(model) ||
+    /claude-4(?:-(?:sonnet|opus|haiku))?/.test(model)
+  ) {
+    return {
+      'anthropic-beta': 'prompt-caching-2024-07-31',
+    };
   } else {
     return {
       'anthropic-beta': 'prompt-caching-2024-07-31',

@@ -72,7 +74,8 @@ function configureReasoning(anthropicInput, extendedOptions = {}) {
   if (
     extendedOptions.thinking &&
     updatedOptions?.model &&
-    /claude-3[-.]7/.test(updatedOptions.model)
+    (/claude-3[-.]7/.test(updatedOptions.model) ||
+      /claude-(?:sonnet|opus|haiku)-[4-9]/.test(updatedOptions.model))
   ) {
     updatedOptions.thinking = {
       type: 'enabled',

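A quick sanity check of the consolidated regexes above against a few current model IDs (a sketch only; the standalone function below inlines the same patterns, and matching is substring-based via `.test`):

const supportsCache = (m) =>
  /claude-3[-.]7/.test(m) ||
  /claude-3[-.]5-(?:sonnet|haiku)/.test(m) ||
  /claude-3-(?:sonnet|haiku|opus)?/.test(m) ||
  /claude-(?:sonnet|opus|haiku)-[4-9]/.test(m);
console.log(supportsCache('claude-sonnet-4-20250514')); // true (4.x family)
console.log(supportsCache('claude-3-5-haiku-20241022')); // true
console.log(supportsCache('claude-2.1')); // false
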
@@ -25,10 +25,10 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => {
   let credentials = isUserProvided
     ? await getUserKey({ userId: req.user.id, name: EModelEndpoint.bedrock })
     : {
-        accessKeyId: BEDROCK_AWS_ACCESS_KEY_ID,
-        secretAccessKey: BEDROCK_AWS_SECRET_ACCESS_KEY,
-        ...(BEDROCK_AWS_SESSION_TOKEN && { sessionToken: BEDROCK_AWS_SESSION_TOKEN }),
-      };
+      accessKeyId: BEDROCK_AWS_ACCESS_KEY_ID,
+      secretAccessKey: BEDROCK_AWS_SECRET_ACCESS_KEY,
+      ...(BEDROCK_AWS_SESSION_TOKEN && { sessionToken: BEDROCK_AWS_SESSION_TOKEN }),
+    };

   if (!credentials) {
     throw new Error('Bedrock credentials not provided. Please provide them again.');

@@ -105,6 +105,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
     headers: resolvedHeaders,
     addParams: endpointConfig.addParams,
     dropParams: endpointConfig.dropParams,
+    customParams: endpointConfig.customParams,
     titleConvo: endpointConfig.titleConvo,
     titleModel: endpointConfig.titleModel,
     forcePrompt: endpointConfig.forcePrompt,

@@ -2,7 +2,12 @@
 const fs = require('fs');
 const path = require('path');
 const FormData = require('form-data');
-const { FileSources, envVarRegex, extractEnvVariable } = require('librechat-data-provider');
+const {
+  FileSources,
+  envVarRegex,
+  extractEnvVariable,
+  extractVariableName,
+} = require('librechat-data-provider');
 const { loadAuthValues } = require('~/server/services/Tools/credentials');
 const { logger, createAxiosInstance } = require('~/config');
 const { logAxiosError } = require('~/utils/axios');

@@ -42,7 +47,6 @@ async function uploadDocumentToMistral({
     })
     .then((res) => res.data)
     .catch((error) => {
-      logger.error('Error uploading document to Mistral:', error.message);
       throw error;
     });
 }

@@ -108,11 +112,6 @@ async function performOCR({
   });
 }

-function extractVariableName(str) {
-  const match = str.match(envVarRegex);
-  return match ? match[1] : null;
-}
-
 /**
  * Uploads a file to the Mistral OCR API and processes the OCR result.
  *

@@ -217,8 +216,16 @@ const uploadMistralOCR = async ({ req, file, file_id, entity_id }) => {
       images,
     };
   } catch (error) {
-    const message = 'Error uploading document to Mistral OCR API';
-    throw new Error(logAxiosError({ error, message }));
+    let message = 'Error uploading document to Mistral OCR API';
+    const detail = error?.response?.data?.detail;
+    if (detail && detail !== '') {
+      message = detail;
+    }
+
+    const responseMessage = error?.response?.data?.message;
+    throw new Error(
+      `${logAxiosError({ error, message })}${responseMessage && responseMessage !== '' ? ` - ${responseMessage}` : ''}`,
+    );
   }
 };

@@ -124,13 +124,7 @@ describe('MistralOCR Service', () => {
         fileName: 'test.pdf',
         apiKey: 'test-api-key',
       }),
-    ).rejects.toThrow();
-
-    const { logger } = require('~/config');
-    expect(logger.error).toHaveBeenCalledWith(
-      expect.stringContaining('Error uploading document to Mistral:'),
-      expect.any(String),
-    );
+    ).rejects.toThrow(errorMessage);
   });
 });

@@ -54,7 +54,7 @@ async function deleteOpenAIFile(req, file, openai) {
       throw new Error('OpenAI returned `false` for deleted status');
     }
     logger.debug(
-      `[deleteOpenAIFile] User ${req.user.id} successfully deleted ${file.file_id} from OpenAI`,
+      `[deleteOpenAIFile] User ${req.user.id} successfully deleted file "${file.file_id}" from OpenAI`,
     );
   } catch (error) {
     logger.error('[deleteOpenAIFile] Error deleting file from OpenAI: ' + error.message);

@@ -5,9 +5,10 @@ const { EModelEndpoint } = require('librechat-data-provider');
  * Resizes an image from a given buffer based on the specified resolution.
  *
  * @param {Buffer} inputBuffer - The buffer of the image to be resized.
- * @param {'low' | 'high'} resolution - The resolution to resize the image to.
+ * @param {'low' | 'high' | {percentage?: number, px?: number}} resolution - The resolution to resize the image to.
  *   'low' for a maximum of 512x512 resolution,
- *   'high' for a maximum of 768x2000 resolution.
+ *   'high' for a maximum of 768x2000 resolution,
+ *   or a custom object with percentage or px values.
  * @param {EModelEndpoint} endpoint - Identifier for specific endpoint handling
  * @returns {Promise<{buffer: Buffer, width: number, height: number}>} An object containing the resized image buffer and its dimensions.
  * @throws Will throw an error if the resolution parameter is invalid.

@@ -17,10 +18,32 @@ async function resizeImageBuffer(inputBuffer, resolution, endpoint) {
   const maxShortSideHighRes = 768;
   const maxLongSideHighRes = endpoint === EModelEndpoint.anthropic ? 1568 : 2000;

+  let customPercent, customPx;
+  if (resolution && typeof resolution === 'object') {
+    if (typeof resolution.percentage === 'number') {
+      customPercent = resolution.percentage;
+    } else if (typeof resolution.px === 'number') {
+      customPx = resolution.px;
+    }
+  }
+
   let newWidth, newHeight;
   let resizeOptions = { fit: 'inside', withoutEnlargement: true };

-  if (resolution === 'low') {
+  if (customPercent != null || customPx != null) {
+    // percentage-based resize
+    const metadata = await sharp(inputBuffer).metadata();
+    if (customPercent != null) {
+      newWidth = Math.round(metadata.width * (customPercent / 100));
+      newHeight = Math.round(metadata.height * (customPercent / 100));
+    } else {
+      // pixel max on both sides
+      newWidth = Math.min(metadata.width, customPx);
+      newHeight = Math.min(metadata.height, customPx);
+    }
+    resizeOptions.width = newWidth;
+    resizeOptions.height = newHeight;
+  } else if (resolution === 'low') {
     resizeOptions.width = maxLowRes;
     resizeOptions.height = maxLowRes;
   } else if (resolution === 'high') {

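With the extension above, callers can pass an object instead of 'low' or 'high'. A minimal usage sketch (the buffer and endpoint values are placeholders):

// Shrink to 50% of the original dimensions:
await resizeImageBuffer(inputBuffer, { percentage: 50 }, endpoint);
// Or cap both sides at 1024px; withoutEnlargement keeps smaller images as-is:
await resizeImageBuffer(inputBuffer, { px: 1024 }, endpoint);
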
@@ -137,11 +137,13 @@ const processDeleteRequest = async ({ req, files }) => {
   /** @type {Record<string, OpenAI | undefined>} */
   const client = { [FileSources.openai]: undefined, [FileSources.azure]: undefined };
   const initializeClients = async () => {
-    const openAIClient = await getOpenAIClient({
-      req,
-      overrideEndpoint: EModelEndpoint.assistants,
-    });
-    client[FileSources.openai] = openAIClient.openai;
+    if (req.app.locals[EModelEndpoint.assistants]) {
+      const openAIClient = await getOpenAIClient({
+        req,
+        overrideEndpoint: EModelEndpoint.assistants,
+      });
+      client[FileSources.openai] = openAIClient.openai;
+    }

     if (!req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
       return;

@@ -693,7 +695,7 @@ const processOpenAIFile = async ({
 const processOpenAIImageOutput = async ({ req, buffer, file_id, filename, fileExt }) => {
   const currentDate = new Date();
   const formattedDate = currentDate.toISOString();
-  const _file = await convertImage(req, buffer, 'high', `${file_id}${fileExt}`);
+  const _file = await convertImage(req, buffer, undefined, `${file_id}${fileExt}`);
   const file = {
     ..._file,
     usage: 1,

@@ -838,8 +840,9 @@ function base64ToBuffer(base64String) {

 async function saveBase64Image(
   url,
-  { req, file_id: _file_id, filename: _filename, endpoint, context, resolution = 'high' },
+  { req, file_id: _file_id, filename: _filename, endpoint, context, resolution },
 ) {
+  const effectiveResolution = resolution ?? req.app.locals.fileConfig?.imageGeneration ?? 'high';
   const file_id = _file_id ?? v4();
   let filename = `${file_id}-${_filename}`;
   const { buffer: inputBuffer, type } = base64ToBuffer(url);

@@ -852,7 +855,7 @@ async function saveBase64Image(
     }
   }

-  const image = await resizeImageBuffer(inputBuffer, resolution, endpoint);
+  const image = await resizeImageBuffer(inputBuffer, effectiveResolution, endpoint);
   const source = req.app.locals.fileStrategy;
   const { saveBuffer } = getStrategyFunctions(source);
   const filepath = await saveBuffer({

@@ -1,5 +1,6 @@
 const { z } = require('zod');
 const { tool } = require('@langchain/core/tools');
+const { normalizeServerName } = require('librechat-mcp');
 const { Constants: AgentConstants, Providers } = require('@librechat/agents');
 const {
   Constants,

@@ -38,6 +39,7 @@ async function createMCPTool({ req, toolKey, provider: _provider }) {
   }

   const [toolName, serverName] = toolKey.split(Constants.mcp_delimiter);
+  const normalizedToolKey = `${toolName}${Constants.mcp_delimiter}${normalizeServerName(serverName)}`;

   if (!req.user?.id) {
     logger.error(

@@ -83,7 +85,7 @@ async function createMCPTool({ req, toolKey, provider: _provider }) {

   const toolInstance = tool(_call, {
     schema,
-    name: toolKey,
+    name: normalizedToolKey,
     description: description || '',
     responseFormat: AgentConstants.CONTENT_AND_ARTIFACT,
   });

@@ -66,16 +66,26 @@ const getUserPluginAuthValue = async (userId, authField, throwError = true) => {
 // }
 // };

+/**
+ *
+ * @async
+ * @param {string} userId
+ * @param {string} authField
+ * @param {string} pluginKey
+ * @param {string} value
+ * @returns {Promise<IPluginAuth>}
+ * @throws {Error}
+ */
 const updateUserPluginAuth = async (userId, authField, pluginKey, value) => {
   try {
     const encryptedValue = await encrypt(value);
     const pluginAuth = await PluginAuth.findOne({ userId, authField }).lean();
     if (pluginAuth) {
-      const pluginAuth = await PluginAuth.updateOne(
+      return await PluginAuth.findOneAndUpdate(
         { userId, authField },
         { $set: { value: encryptedValue } },
-      );
-      return pluginAuth;
+        { new: true, upsert: true },
+      ).lean();
     } else {
       const newPluginAuth = await new PluginAuth({
         userId,

@@ -84,7 +94,7 @@ const updateUserPluginAuth = async (userId, authField, pluginKey, value) => {
         pluginKey,
       });
       await newPluginAuth.save();
-      return newPluginAuth;
+      return newPluginAuth.toObject();
     }
   } catch (err) {
     logger.error('[updateUserPluginAuth]', err);

@@ -92,6 +102,14 @@ const updateUserPluginAuth = async (userId, authField, pluginKey, value) => {
   }
 };

+/**
+ * @async
+ * @param {string} userId
+ * @param {string} authField
+ * @param {boolean} [all]
+ * @returns {Promise<import('mongoose').DeleteResult>}
+ * @throws {Error}
+ */
 const deleteUserPluginAuth = async (userId, authField, all = false) => {
   if (all) {
     try {

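The switch to `findOneAndUpdate` with `{ new: true, upsert: true }` means the update path now returns the stored document itself (as a plain object via `.lean()`), where `updateOne` only returned a write result. A usage sketch with hypothetical arguments:

// Returns the saved plugin auth record on both the update and create paths:
const auth = await updateUserPluginAuth(userId, 'MY_API_KEY', 'my-plugin', 'secret');
console.log(auth.value); // the encrypted value, not the raw secret
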
@@ -1,10 +1,11 @@
 const fs = require('fs');
 const path = require('path');
 const { zodToJsonSchema } = require('zod-to-json-schema');
-const { tool: toolFn, Tool, DynamicStructuredTool } = require('@langchain/core/tools');
 const { Calculator } = require('@langchain/community/tools/calculator');
+const { tool: toolFn, Tool, DynamicStructuredTool } = require('@langchain/core/tools');
 const {
   Tools,
   Constants,
   ErrorTypes,
   ContentTypes,
   imageGenTools,

@@ -14,6 +15,7 @@ const {
   ImageVisionTool,
   openapiToFunction,
   AgentCapabilities,
+  defaultAgentCapabilities,
   validateAndParseOpenAPISpec,
 } = require('librechat-data-provider');
 const {

@@ -29,6 +31,7 @@ const {
   toolkits,
 } = require('~/app/clients/tools');
 const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
+const { createOnSearchResults } = require('~/server/services/Tools/search');
 const { isActionDomainAllowed } = require('~/server/services/domains');
 const { getEndpointsConfig } = require('~/server/services/Config');
 const { recordUsage } = require('~/server/services/Threads');

@@ -500,15 +503,33 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
   }

   const endpointsConfig = await getEndpointsConfig(req);
-  const enabledCapabilities = new Set(endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? []);
-  const checkCapability = (capability) => enabledCapabilities.has(capability);
+  let enabledCapabilities = new Set(endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? []);
+  /** Edge case: use defined/fallback capabilities when the "agents" endpoint is not enabled */
+  if (enabledCapabilities.size === 0 && agent.id === Constants.EPHEMERAL_AGENT_ID) {
+    enabledCapabilities = new Set(
+      req.app?.locals?.[EModelEndpoint.agents]?.capabilities ?? defaultAgentCapabilities,
+    );
+  }
+  const checkCapability = (capability) => {
+    const enabled = enabledCapabilities.has(capability);
+    if (!enabled) {
+      logger.warn(
+        `Capability "${capability}" disabled${capability === AgentCapabilities.tools ? '.' : ' despite configured tool.'} User: ${req.user.id} | Agent: ${agent.id}`,
+      );
+    }
+    return enabled;
+  };
   const areToolsEnabled = checkCapability(AgentCapabilities.tools);

+  let includesWebSearch = false;
   const _agentTools = agent.tools?.filter((tool) => {
     if (tool === Tools.file_search) {
       return checkCapability(AgentCapabilities.file_search);
     } else if (tool === Tools.execute_code) {
       return checkCapability(AgentCapabilities.execute_code);
+    } else if (tool === Tools.web_search) {
+      includesWebSearch = checkCapability(AgentCapabilities.web_search);
+      return includesWebSearch;
     } else if (!areToolsEnabled && !tool.includes(actionDelimiter)) {
       return false;
     }

@@ -518,7 +539,11 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
   if (!_agentTools || _agentTools.length === 0) {
     return {};
   }
+  /** @type {ReturnType<createOnSearchResults>} */
+  let webSearchCallbacks;
+  if (includesWebSearch) {
+    webSearchCallbacks = createOnSearchResults(res);
+  }
   const { loadedTools, toolContextMap } = await loadTools({
     agent,
     functions: true,

@@ -532,6 +557,7 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
       uploadImageBuffer,
       returnMetadata: true,
       fileStrategy: req.app.locals.fileStrategy,
+      [Tools.web_search]: webSearchCallbacks,
     },
   });

122 api/server/services/Tools/search.js (new file)

@@ -0,0 +1,122 @@
+const { nanoid } = require('nanoid');
+const { Tools } = require('librechat-data-provider');
+const { logger } = require('~/config');
+
+/**
+ * Creates a function to handle search results and stream them as attachments
+ * @param {import('http').ServerResponse} res - The HTTP server response object
+ * @returns {{ onSearchResults: function(SearchResult, GraphRunnableConfig): void; onGetHighlights: function(string): void}} - Function that takes search results and returns or streams an attachment
+ */
+function createOnSearchResults(res) {
+  const context = {
+    sourceMap: new Map(),
+    searchResultData: undefined,
+    toolCallId: undefined,
+    attachmentName: undefined,
+    messageId: undefined,
+    conversationId: undefined,
+  };
+
+  /**
+   * @param {SearchResult} results
+   * @param {GraphRunnableConfig} runnableConfig
+   */
+  function onSearchResults(results, runnableConfig) {
+    logger.info(
+      `[onSearchResults] user: ${runnableConfig.metadata.user_id} | thread_id: ${runnableConfig.metadata.thread_id} | run_id: ${runnableConfig.metadata.run_id}`,
+      results,
+    );
+
+    if (!results.success) {
+      logger.error(
+        `[onSearchResults] user: ${runnableConfig.metadata.user_id} | thread_id: ${runnableConfig.metadata.thread_id} | run_id: ${runnableConfig.metadata.run_id} | error: ${results.error}`,
+      );
+      return;
+    }
+
+    const turn = runnableConfig.toolCall?.turn ?? 0;
+    const data = { turn, ...structuredClone(results.data ?? {}) };
+    context.searchResultData = data;
+
+    // Map sources to links
+    for (let i = 0; i < data.organic.length; i++) {
+      const source = data.organic[i];
+      if (source.link) {
+        context.sourceMap.set(source.link, {
+          type: 'organic',
+          index: i,
+          turn,
+        });
+      }
+    }
+    for (let i = 0; i < data.topStories.length; i++) {
+      const source = data.topStories[i];
+      if (source.link) {
+        context.sourceMap.set(source.link, {
+          type: 'topStories',
+          index: i,
+          turn,
+        });
+      }
+    }
+
+    context.toolCallId = runnableConfig.toolCall.id;
+    context.messageId = runnableConfig.metadata.run_id;
+    context.conversationId = runnableConfig.metadata.thread_id;
+    context.attachmentName = `${runnableConfig.toolCall.name}_${context.toolCallId}_${nanoid()}`;
+
+    const attachment = buildAttachment(context);
+
+    if (!res.headersSent) {
+      return attachment;
+    }
+    res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
+  }
+
+  /**
+   * @param {string} link
+   * @returns {void}
+   */
+  function onGetHighlights(link) {
+    const source = context.sourceMap.get(link);
+    if (!source) {
+      return;
+    }
+    const { type, index } = source;
+    const data = context.searchResultData;
+    if (!data) {
+      return;
+    }
+    if (data[type][index] != null) {
+      data[type][index].processed = true;
+    }
+
+    const attachment = buildAttachment(context);
+    res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
+  }
+
+  return {
+    onSearchResults,
+    onGetHighlights,
+  };
+}
+
+/**
+ * Helper function to build an attachment object
+ * @param {object} context - The context containing attachment data
+ * @returns {object} - The attachment object
+ */
+function buildAttachment(context) {
+  return {
+    messageId: context.messageId,
+    toolCallId: context.toolCallId,
+    conversationId: context.conversationId,
+    name: context.attachmentName,
+    type: Tools.web_search,
+    [Tools.web_search]: context.searchResultData,
+  };
+}
+
+module.exports = {
+  createOnSearchResults,
+};

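The new module is consumed in loadAgentTools above: the callbacks are created once per response and handed to the web_search tool. A minimal wiring sketch (the runnableConfig shape follows the JSDoc in this file):

const { createOnSearchResults } = require('~/server/services/Tools/search');
const { onSearchResults, onGetHighlights } = createOnSearchResults(res);
// The search tool invokes these as results arrive; once headers are sent,
// each call streams an `attachment` SSE event instead of returning a value.
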
@@ -1,7 +1,9 @@
 const {
   Constants,
+  webSearchKeys,
   deprecatedAzureVariables,
   conflictingAzureVariables,
+  extractVariableName,
 } = require('librechat-data-provider');
 const { isEnabled, checkEmailConfig } = require('~/server/utils');
 const { logger } = require('~/config');

@@ -141,4 +143,56 @@ function checkPasswordReset() {
   }
 }

-module.exports = { checkVariables, checkHealth, checkConfig, checkAzureVariables };
+/**
+ * Checks web search configuration values to ensure they are environment variable references.
+ * Warns if actual API keys or URLs are used instead of environment variable references.
+ * Logs debug information for properly configured environment variable references.
+ * @param {Object} webSearchConfig - The loaded web search configuration object.
+ */
+function checkWebSearchConfig(webSearchConfig) {
+  if (!webSearchConfig) {
+    return;
+  }
+
+  webSearchKeys.forEach((key) => {
+    const value = webSearchConfig[key];
+
+    if (typeof value === 'string') {
+      const varName = extractVariableName(value);
+
+      if (varName) {
+        // This is a proper environment variable reference
+        const actualValue = process.env[varName];
+        if (actualValue) {
+          logger.debug(`Web search ${key}: Using environment variable ${varName} with value set`);
+        } else {
+          logger.debug(
+            `Web search ${key}: Using environment variable ${varName} (not set in environment, user provided value)`,
+          );
+        }
+      } else {
+        // This is not an environment variable reference - warn user
+        logger.warn(
+          `❗ Web search configuration error: ${key} contains an actual value instead of an environment variable reference.
+
+Current value: "${value.substring(0, 10)}..."
+
+This is incorrect! You should use environment variable references in your librechat.yaml file, such as:
+${key}: "\${YOUR_ENV_VAR_NAME}"
+
+Then set the actual API key in your .env file or environment variables.
+
+More info: https://www.librechat.ai/docs/configuration/librechat_yaml/web_search`,
+        );
+      }
+    }
+  });
+}
+
+module.exports = {
+  checkHealth,
+  checkConfig,
+  checkVariables,
+  checkAzureVariables,
+  checkWebSearchConfig,
+};

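In practice the check above distinguishes env var references from literals. A sketch of both cases (keys follow webSearchKeys; values are illustrative):

checkWebSearchConfig({ serperApiKey: '${SERPER_API_KEY}' }); // debug log: proper env var reference
checkWebSearchConfig({ serperApiKey: 'sk-1234567890abcdef' }); // warn: literal value placed in YAML
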
203 api/server/services/start/checks.spec.js (new file)

@@ -0,0 +1,203 @@
+// Mock librechat-data-provider
+jest.mock('librechat-data-provider', () => ({
+  ...jest.requireActual('librechat-data-provider'),
+  extractVariableName: jest.fn(),
+}));
+
+// Mock the config logger
+jest.mock('~/config', () => ({
+  logger: {
+    debug: jest.fn(),
+    warn: jest.fn(),
+  },
+}));
+
+const { checkWebSearchConfig } = require('./checks');
+const { logger } = require('~/config');
+const { extractVariableName } = require('librechat-data-provider');
+
+describe('checkWebSearchConfig', () => {
+  let originalEnv;
+
+  beforeEach(() => {
+    // Clear all mocks
+    jest.clearAllMocks();
+
+    // Store original environment
+    originalEnv = process.env;
+
+    // Reset process.env
+    process.env = { ...originalEnv };
+  });
+
+  afterEach(() => {
+    // Restore original environment
+    process.env = originalEnv;
+  });
+
+  describe('when webSearchConfig is undefined or null', () => {
+    it('should return early without logging when config is undefined', () => {
+      checkWebSearchConfig(undefined);
+
+      expect(logger.debug).not.toHaveBeenCalled();
+      expect(logger.warn).not.toHaveBeenCalled();
+    });
+
+    it('should return early without logging when config is null', () => {
+      checkWebSearchConfig(null);
+
+      expect(logger.debug).not.toHaveBeenCalled();
+      expect(logger.warn).not.toHaveBeenCalled();
+    });
+  });
+
+  describe('when config values are proper environment variable references', () => {
+    it('should log debug message for each valid environment variable with value set', () => {
+      const config = {
+        serperApiKey: '${SERPER_API_KEY}',
+        jinaApiKey: '${JINA_API_KEY}',
+      };
+
+      extractVariableName.mockReturnValueOnce('SERPER_API_KEY').mockReturnValueOnce('JINA_API_KEY');
+
+      process.env.SERPER_API_KEY = 'test-serper-key';
+      process.env.JINA_API_KEY = 'test-jina-key';
+
+      checkWebSearchConfig(config);
+
+      expect(extractVariableName).toHaveBeenCalledWith('${SERPER_API_KEY}');
+      expect(extractVariableName).toHaveBeenCalledWith('${JINA_API_KEY}');
+      expect(logger.debug).toHaveBeenCalledWith(
+        'Web search serperApiKey: Using environment variable SERPER_API_KEY with value set',
+      );
+      expect(logger.debug).toHaveBeenCalledWith(
+        'Web search jinaApiKey: Using environment variable JINA_API_KEY with value set',
+      );
+      expect(logger.warn).not.toHaveBeenCalled();
+    });
+
+    it('should log debug message for environment variables not set in environment', () => {
+      const config = {
+        cohereApiKey: '${COHERE_API_KEY}',
+      };
+
+      extractVariableName.mockReturnValue('COHERE_API_KEY');
+
+      delete process.env.COHERE_API_KEY;
+
+      checkWebSearchConfig(config);
+
+      expect(logger.debug).toHaveBeenCalledWith(
+        'Web search cohereApiKey: Using environment variable COHERE_API_KEY (not set in environment, user provided value)',
+      );
+      expect(logger.warn).not.toHaveBeenCalled();
+    });
+  });
+
+  describe('when config values are actual values instead of environment variable references', () => {
+    it('should warn when serperApiKey contains actual API key', () => {
+      const config = {
+        serperApiKey: 'sk-1234567890abcdef',
+      };
+
+      extractVariableName.mockReturnValue(null);
+
+      checkWebSearchConfig(config);
+
+      expect(logger.warn).toHaveBeenCalledWith(
+        expect.stringContaining(
+          '❗ Web search configuration error: serperApiKey contains an actual value',
+        ),
+      );
+      expect(logger.warn).toHaveBeenCalledWith(
+        expect.stringContaining('Current value: "sk-1234567..."'),
+      );
+      expect(logger.debug).not.toHaveBeenCalled();
+    });
+
+    it('should warn when firecrawlApiUrl contains actual URL', () => {
+      const config = {
+        firecrawlApiUrl: 'https://api.firecrawl.dev',
+      };
+
+      extractVariableName.mockReturnValue(null);
+
+      checkWebSearchConfig(config);
+
+      expect(logger.warn).toHaveBeenCalledWith(
+        expect.stringContaining(
+          '❗ Web search configuration error: firecrawlApiUrl contains an actual value',
+        ),
+      );
+      expect(logger.warn).toHaveBeenCalledWith(
+        expect.stringContaining('Current value: "https://ap..."'),
+      );
+    });
+
+    it('should include documentation link in warning message', () => {
+      const config = {
+        firecrawlApiKey: 'fc-actual-key',
+      };
+
+      extractVariableName.mockReturnValue(null);
+
+      checkWebSearchConfig(config);
+
+      expect(logger.warn).toHaveBeenCalledWith(
+        expect.stringContaining(
+          'More info: https://www.librechat.ai/docs/configuration/librechat_yaml/web_search',
+        ),
+      );
+    });
+  });
+
+  describe('when config contains mixed value types', () => {
+    it('should only process string values and ignore non-string values', () => {
+      const config = {
+        serperApiKey: '${SERPER_API_KEY}',
+        safeSearch: 1,
+        scraperTimeout: 7500,
+        jinaApiKey: 'actual-key',
+      };
+
+      extractVariableName.mockReturnValueOnce('SERPER_API_KEY').mockReturnValueOnce(null);
+
+      process.env.SERPER_API_KEY = 'test-key';
+
+      checkWebSearchConfig(config);
+
+      expect(extractVariableName).toHaveBeenCalledTimes(2);
+      expect(logger.debug).toHaveBeenCalledTimes(1);
+      expect(logger.warn).toHaveBeenCalledTimes(1);
+    });
+  });
+
+  describe('edge cases', () => {
+    it('should handle config with no web search keys', () => {
+      const config = {
+        someOtherKey: 'value',
+        anotherKey: '${SOME_VAR}',
+      };
+
+      checkWebSearchConfig(config);
+
+      expect(extractVariableName).not.toHaveBeenCalled();
+      expect(logger.debug).not.toHaveBeenCalled();
+      expect(logger.warn).not.toHaveBeenCalled();
+    });
+
+    it('should truncate long values in warning messages', () => {
+      const config = {
+        serperApiKey: 'this-is-a-very-long-api-key-that-should-be-truncated-in-the-warning-message',
+      };
+
+      extractVariableName.mockReturnValue(null);
+
+      checkWebSearchConfig(config);
+
+      expect(logger.warn).toHaveBeenCalledWith(
+        expect.stringContaining('Current value: "this-is-a-..."'),
+      );
+    });
+  });
+});

@@ -38,6 +38,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
     agents: interfaceConfig?.agents ?? defaults.agents,
     temporaryChat: interfaceConfig?.temporaryChat ?? defaults.temporaryChat,
     runCode: interfaceConfig?.runCode ?? defaults.runCode,
+    webSearch: interfaceConfig?.webSearch ?? defaults.webSearch,
     customWelcome: interfaceConfig?.customWelcome ?? defaults.customWelcome,
   });

@@ -48,6 +49,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
     [PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
     [PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
     [PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
+    [PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
   });
   await updateAccessPermissions(SystemRoles.ADMIN, {
     [PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },

@@ -56,6 +58,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
     [PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
     [PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
     [PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
+    [PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
   });

   let i = 0;

@@ -74,7 +77,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
   // warn about config.modelSpecs.prioritize if true and presets are enabled, that default presets will conflict with prioritizing model specs.
   if (config?.modelSpecs?.prioritize && loadedInterface.presets) {
     logger.warn(
-      'Note: Prioritizing model specs can conflict with default presets if a default preset is set. It\'s recommended to disable presets from the interface or disable use of a default preset.',
+      "Note: Prioritizing model specs can conflict with default presets if a default preset is set. It's recommended to disable presets from the interface or disable use of a default preset.",
     );
     i === 0 && i++;
   }

@@ -88,14 +91,14 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
       loadedInterface.parameters)
   ) {
     logger.warn(
-      'Note: Enforcing model specs can conflict with the interface options: endpointsMenu, modelSelect, presets, and parameters. It\'s recommended to disable these options from the interface or disable enforcing model specs.',
+      "Note: Enforcing model specs can conflict with the interface options: endpointsMenu, modelSelect, presets, and parameters. It's recommended to disable these options from the interface or disable enforcing model specs.",
     );
     i === 0 && i++;
   }
   // warn if enforce is true and prioritize is not, that enforcing model specs without prioritizing them can lead to unexpected behavior.
   if (config?.modelSpecs?.enforce && !config?.modelSpecs?.prioritize) {
     logger.warn(
-      'Note: Enforcing model specs without prioritizing them can lead to unexpected behavior. It\'s recommended to enable prioritizing model specs if enforcing them.',
+      "Note: Enforcing model specs without prioritizing them can lead to unexpected behavior. It's recommended to enable prioritizing model specs if enforcing them.",
    );
     i === 0 && i++;
   }

|||
|
|
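Note on the merge above: each interface flag falls back through nullish coalescing (`??`), so an explicit `false` in the user config is preserved and only a missing key inherits the compiled default. A minimal standalone sketch of that behavior (hypothetical values, not LibreChat code):

// `??` only falls back on null/undefined, never on `false`:
const defaults = { temporaryChat: true, runCode: true };
const interfaceConfig = { temporaryChat: false }; // runCode left unset
const loaded = {
  temporaryChat: interfaceConfig?.temporaryChat ?? defaults.temporaryChat, // false (explicit value kept)
  runCode: interfaceConfig?.runCode ?? defaults.runCode, // true (inherited from defaults)
};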
@@ -16,6 +16,7 @@ describe('loadDefaultInterface', () => {
agents: true,
temporaryChat: true,
runCode: true,
webSearch: true,
},
};
const configDefaults = { interface: {} };

@@ -29,6 +30,7 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
});
});

@@ -41,6 +43,7 @@ describe('loadDefaultInterface', () => {
agents: false,
temporaryChat: false,
runCode: false,
webSearch: false,
},
};
const configDefaults = { interface: {} };

@@ -54,6 +57,7 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: false },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: false },
});
});

@@ -70,6 +74,7 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
});
});

@@ -82,6 +87,7 @@ describe('loadDefaultInterface', () => {
agents: undefined,
temporaryChat: undefined,
runCode: undefined,
webSearch: undefined,
},
};
const configDefaults = { interface: {} };

@@ -95,6 +101,7 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
});
});

@@ -107,6 +114,7 @@ describe('loadDefaultInterface', () => {
agents: true,
temporaryChat: undefined,
runCode: false,
webSearch: true,
},
};
const configDefaults = { interface: {} };

@@ -120,6 +128,7 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
});
});

@@ -133,6 +142,7 @@ describe('loadDefaultInterface', () => {
agents: true,
temporaryChat: true,
runCode: true,
webSearch: true,
},
};

@@ -145,6 +155,7 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
});
});

@@ -161,6 +172,7 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
});
});

@@ -177,6 +189,7 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
});
});

@@ -193,6 +206,7 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
});
});

@@ -218,6 +232,7 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
});
});

@@ -231,6 +246,7 @@ describe('loadDefaultInterface', () => {
agents: undefined,
temporaryChat: undefined,
runCode: undefined,
webSearch: undefined,
},
};

@@ -243,6 +259,33 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
});
});

it('should call updateAccessPermissions with the correct parameters when WEB_SEARCH is undefined', async () => {
const config = {
interface: {
prompts: true,
bookmarks: false,
multiConvo: true,
agents: false,
temporaryChat: true,
runCode: false,
},
};
const configDefaults = { interface: {} };

await loadDefaultInterface(config, configDefaults);

expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
});
});
});
@@ -26,7 +26,16 @@ function loadTurnstileConfig(config, configDefaults) {
options: customTurnstile.options ?? defaults.options,
});

logger.info('Turnstile configuration loaded:\n' + JSON.stringify(loadedTurnstile, null, 2));
const enabled = Boolean(loadedTurnstile.siteKey);

if (enabled) {
logger.info(
'Turnstile is ENABLED with configuration:\n' + JSON.stringify(loadedTurnstile, null, 2),
);
} else {
logger.info('Turnstile is DISABLED (no siteKey provided).');
}

return loadedTurnstile;
}
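As the new logging makes explicit, Turnstile counts as enabled only when a non-empty `siteKey` is present. A trivial sketch of the same gating (hypothetical site keys):

// Boolean('') === false, Boolean(undefined) === false, any non-empty string is true
Boolean({ siteKey: '' }.siteKey); // false -> "Turnstile is DISABLED"
Boolean({ siteKey: '0x4AAAAAAA' }.siteKey); // true -> "Turnstile is ENABLED"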
@@ -10,6 +10,8 @@ const {
discordLogin,
facebookLogin,
appleLogin,
setupSaml,
openIdJwtLogin,
} = require('~/strategies');
const { isEnabled } = require('~/server/utils');
const keyvRedis = require('~/cache/keyvRedis');

@@ -19,7 +21,7 @@ const { logger } = require('~/config');
*
* @param {Express.Application} app
*/
const configureSocialLogins = (app) => {
const configureSocialLogins = async (app) => {
logger.info('Configuring social logins...');

if (process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET) {

@@ -62,10 +64,41 @@ const configureSocialLogins = (app) => {
}
app.use(session(sessionOptions));
app.use(passport.session());
setupOpenId();

const config = await setupOpenId();
if (isEnabled(process.env.OPENID_REUSE_TOKENS)) {
logger.info('OpenID token reuse is enabled.');
passport.use('openidJwt', openIdJwtLogin(config));
}
logger.info('OpenID Connect configured.');
}
if (
process.env.SAML_ENTRY_POINT &&
process.env.SAML_ISSUER &&
process.env.SAML_CERT &&
process.env.SAML_SESSION_SECRET
) {
logger.info('Configuring SAML Connect...');
const sessionOptions = {
secret: process.env.SAML_SESSION_SECRET,
resave: false,
saveUninitialized: false,
};
if (isEnabled(process.env.USE_REDIS)) {
logger.debug('Using Redis for session storage in SAML...');
const keyv = new Keyv({ store: keyvRedis });
const client = keyv.opts.store.client;
sessionOptions.store = new RedisStore({ client, prefix: 'saml_session' });
} else {
sessionOptions.store = new MemoryStore({
checkPeriod: 86400000, // prune expired entries every 24h
});
}
app.use(session(sessionOptions));
app.use(passport.session());
setupSaml();

logger.info('SAML Connect configured.');
}
};

module.exports = configureSocialLogins;
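Because `setupOpenId` now resolves the discovered client configuration, `configureSocialLogins` became async, so its caller must await it before mounting auth routes. A minimal call-site sketch (the Express app setup and require path are assumptions for illustration):

const express = require('express');
const configureSocialLogins = require('./socialLogins'); // require path assumed

const app = express();
(async () => {
  // Must be awaited now that OpenID discovery happens inside:
  await configureSocialLogins(app);
  // ...mount OAuth/SAML routes only after strategies are registered
})();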
@@ -4,10 +4,10 @@ const {
Capabilities,
EModelEndpoint,
isAgentsEndpoint,
AgentCapabilities,
isAssistantsEndpoint,
defaultRetrievalModels,
defaultAssistantsVersion,
defaultAgentCapabilities,
} = require('librechat-data-provider');
const { Providers } = require('@librechat/agents');
const partialRight = require('lodash/partialRight');

@@ -197,15 +197,7 @@ function generateConfig(key, baseURL, endpoint) {
}

if (agents) {
config.capabilities = [
AgentCapabilities.execute_code,
AgentCapabilities.file_search,
AgentCapabilities.artifacts,
AgentCapabilities.actions,
AgentCapabilities.tools,
AgentCapabilities.ocr,
AgentCapabilities.chain,
];
config.capabilities = defaultAgentCapabilities;
}

if (assistants && endpoint === EModelEndpoint.azureAssistants) {
@@ -1,6 +1,5 @@
const fs = require('fs').promises;
const { getImporter } = require('./importers');
const { indexSync } = require('~/lib/db');
const { logger } = require('~/config');

/**

@@ -15,8 +14,6 @@ const importConversations = async (job) => {
const jsonData = JSON.parse(fileData);
const importer = getImporter(jsonData);
await importer(jsonData, requestUserId);
// Sync Meilisearch index
await indexSync();
logger.debug(`user: ${requestUserId} | Finished importing conversations`);
} catch (error) {
logger.error(`user: ${requestUserId} | Failed to import conversation: `, error);
@@ -84,14 +84,14 @@ describe('importChatGptConvo', () => {
const { parent } = jsonData[0].mapping[id];

const expectedParentId = parent
? idToUUIDMap.get(parent) ?? Constants.NO_PARENT
? (idToUUIDMap.get(parent) ?? Constants.NO_PARENT)
: Constants.NO_PARENT;

const actualMessageId = idToUUIDMap.get(id);
const actualParentId = actualMessageId
? importBatchBuilder.saveMessage.mock.calls.find(
(call) => call[0].messageId === actualMessageId,
)[0].parentMessageId
(call) => call[0].messageId === actualMessageId,
)[0].parentMessageId
: Constants.NO_PARENT;

expect(actualParentId).toBe(expectedParentId);

@@ -544,7 +544,7 @@ describe('processAssistantMessage', () => {

// Expected output should have all citations replaced with markdown links
const expectedOutput =
'Signal Sciences is a web application security company that was founded on March 10, 2014, by Andrew Peterson, Nick Galbreath, and Zane Lackey. It operates as a for-profit company with its legal name being Signal Sciences Corp. The company has achieved significant growth and is recognized as the fastest-growing web application security company in the world. Signal Sciences developed a next-gen web application firewall (NGWAF) and runtime application self-protection (RASP) technologies designed to increase security and maintain reliability without compromising the performance of modern web applications distributed across cloud, on-premise, edge, or hybrid environments ([Signal Sciences - Crunchbase Company Profile & Funding](https://www.crunchbase.com/organization/signal-sciences)) ([Demand More from Your WAF - Signal Sciences now part of Fastly](https://www.signalsciences.com/)).\n\nIn a major development, Fastly, Inc., a provider of an edge cloud platform, announced the completion of its acquisition of Signal Sciences on October 1, 2020. This acquisition was valued at approximately $775 million in cash and stock. By integrating Signal Sciences\' powerful web application and API security solutions with Fastly\'s edge cloud platform and existing security offerings, they aimed to form a unified suite of security solutions. The merger was aimed at expanding Fastly\'s security portfolio, particularly at a time when digital security has become paramount for businesses operating online ([Fastly Completes Acquisition of Signal Sciences | Fastly](https://www.fastly.com/press/press-releases/fastly-completes-acquisition-signal-sciences)) ([Fastly Agrees to Acquire Signal Sciences for $775 Million - Cooley](https://www.cooley.com/news/coverage/2020/2020-08-27-fastly-agrees-to-acquire-signal-sciences-for-775-million)).';
"Signal Sciences is a web application security company that was founded on March 10, 2014, by Andrew Peterson, Nick Galbreath, and Zane Lackey. It operates as a for-profit company with its legal name being Signal Sciences Corp. The company has achieved significant growth and is recognized as the fastest-growing web application security company in the world. Signal Sciences developed a next-gen web application firewall (NGWAF) and runtime application self-protection (RASP) technologies designed to increase security and maintain reliability without compromising the performance of modern web applications distributed across cloud, on-premise, edge, or hybrid environments ([Signal Sciences - Crunchbase Company Profile & Funding](https://www.crunchbase.com/organization/signal-sciences)) ([Demand More from Your WAF - Signal Sciences now part of Fastly](https://www.signalsciences.com/)).\n\nIn a major development, Fastly, Inc., a provider of an edge cloud platform, announced the completion of its acquisition of Signal Sciences on October 1, 2020. This acquisition was valued at approximately $775 million in cash and stock. By integrating Signal Sciences' powerful web application and API security solutions with Fastly's edge cloud platform and existing security offerings, they aimed to form a unified suite of security solutions. The merger was aimed at expanding Fastly's security portfolio, particularly at a time when digital security has become paramount for businesses operating online ([Fastly Completes Acquisition of Signal Sciences | Fastly](https://www.fastly.com/press/press-releases/fastly-completes-acquisition-signal-sciences)) ([Fastly Agrees to Acquire Signal Sciences for $775 Million - Cooley](https://www.cooley.com/news/coverage/2020/2020-08-27-fastly-agrees-to-acquire-signal-sciences-for-775-million)).";

const result = processAssistantMessage(assistantMessage, messageText);
expect(result).toBe(expectedOutput);

@@ -603,7 +603,7 @@ describe('processAssistantMessage', () => {
// In a ReDoS vulnerability, time would roughly double with each size increase
for (let i = 1; i < results.length; i++) {
const ratio = results[i] / results[i - 1];
expect(ratio).toBeLessThan(2); // Processing time should not double
expect(ratio).toBeLessThan(3); // Allow for CI environment variability while still catching ReDoS
console.log(`Size ${sizes[i]} processing time ratio: ${ratio}`);
}
@@ -1,3 +1,4 @@
const path = require('path');
const expressStaticGzip = require('express-static-gzip');

const oneDayInSeconds = 24 * 60 * 60;

@@ -5,15 +6,45 @@ const oneDayInSeconds = 24 * 60 * 60;
const sMaxAge = process.env.STATIC_CACHE_S_MAX_AGE || oneDayInSeconds;
const maxAge = process.env.STATIC_CACHE_MAX_AGE || oneDayInSeconds * 2;

const staticCache = (staticPath) =>
expressStaticGzip(staticPath, {
enableBrotli: false, // disable Brotli, only using gzip
/**
* Creates an Express static middleware with gzip compression and configurable caching
*
* @param {string} staticPath - The file system path to serve static files from
* @param {Object} [options={}] - Configuration options
* @param {boolean} [options.noCache=false] - If true, disables caching entirely for all files
* @returns {ReturnType<expressStaticGzip>} Express middleware function for serving static files
*/
function staticCache(staticPath, options = {}) {
const { noCache = false } = options;
return expressStaticGzip(staticPath, {
enableBrotli: false,
orderPreference: ['gz'],
setHeaders: (res, _path) => {
if (process.env.NODE_ENV?.toLowerCase() === 'production') {
setHeaders: (res, filePath) => {
if (process.env.NODE_ENV?.toLowerCase() !== 'production') {
return;
}
if (noCache) {
res.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate');
return;
}
if (filePath.includes('/dist/images/')) {
return;
}
const fileName = path.basename(filePath);

if (
fileName === 'index.html' ||
fileName.endsWith('.webmanifest') ||
fileName === 'manifest.json' ||
fileName === 'sw.js'
) {
res.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate');
} else {
res.setHeader('Cache-Control', `public, max-age=${maxAge}, s-maxage=${sMaxAge}`);
}
},
index: false,
});
}

module.exports = staticCache;
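Usage sketch for the reworked `staticCache` middleware (the Express app and directory paths here are assumptions for illustration; `noCache: true` disables caching entirely in production, while other files keep the `max-age`/`s-maxage` policy):

const express = require('express');
const path = require('path');
const staticCache = require('./staticCache'); // require path assumed

const app = express();
// Fingerprinted build assets: cached per STATIC_CACHE_MAX_AGE / STATIC_CACHE_S_MAX_AGE
app.use(staticCache(path.join(__dirname, 'client', 'dist')));
// Files that must always revalidate (index.html, manifests, sw.js are no-store by default):
app.use(staticCache(path.join(__dirname, 'client', 'public'), { noCache: true }));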
@@ -4,9 +4,11 @@ const googleLogin = require('./googleStrategy');
const githubLogin = require('./githubStrategy');
const discordLogin = require('./discordStrategy');
const facebookLogin = require('./facebookStrategy');
const setupOpenId = require('./openidStrategy');
const { setupOpenId, getOpenIdConfig } = require('./openidStrategy');
const jwtLogin = require('./jwtStrategy');
const ldapLogin = require('./ldapStrategy');
const { setupSaml } = require('./samlStrategy');
const openIdJwtLogin = require('./openIdJwtStrategy');

module.exports = {
appleLogin,

@@ -17,5 +19,8 @@ module.exports = {
jwtLogin,
facebookLogin,
setupOpenId,
getOpenIdConfig,
ldapLogin,
setupSaml,
openIdJwtLogin,
};
@@ -4,7 +4,7 @@ const { getUserById, updateUser } = require('~/models');
const { logger } = require('~/config');

// JWT strategy
const jwtLogin = async () =>
const jwtLogin = () =>
new JwtStrategy(
{
jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
@@ -23,7 +23,7 @@ const {

// Check required environment variables
if (!LDAP_URL || !LDAP_USER_SEARCH_BASE) {
return null;
module.exports = null;
}

const searchAttributes = [
api/strategies/openIdJwtStrategy.js (new file, 52 lines)

@@ -0,0 +1,52 @@
const { SystemRoles } = require('librechat-data-provider');
const { Strategy: JwtStrategy, ExtractJwt } = require('passport-jwt');
const { updateUser, findUser } = require('~/models');
const { logger } = require('~/config');
const jwksRsa = require('jwks-rsa');
const { isEnabled } = require('~/server/utils');
/**
* @function openIdJwtLogin
* @param {import('openid-client').Configuration} openIdConfig - Configuration object for the JWT strategy.
* @returns {JwtStrategy}
* @description This function creates a JWT strategy for OpenID authentication.
* It uses the jwks-rsa library to retrieve the signing key from a JWKS endpoint.
* The strategy extracts the JWT from the Authorization header as a Bearer token.
* The JWT is then verified using the signing key, and the user is retrieved from the database.
*/
const openIdJwtLogin = (openIdConfig) =>
new JwtStrategy(
{
jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
secretOrKeyProvider: jwksRsa.passportJwtSecret({
cache: isEnabled(process.env.OPENID_JWKS_URL_CACHE_ENABLED) || true,
cacheMaxAge: process.env.OPENID_JWKS_URL_CACHE_TIME
? eval(process.env.OPENID_JWKS_URL_CACHE_TIME)
: 60000,
jwksUri: openIdConfig.serverMetadata().jwks_uri,
}),
},
async (payload, done) => {
try {
const user = await findUser({ openidId: payload?.sub });

if (user) {
user.id = user._id.toString();
if (!user.role) {
user.role = SystemRoles.USER;
await updateUser(user.id, { role: user.role });
}
done(null, user);
} else {
logger.warn(
'[openIdJwtLogin] openId JwtStrategy => no user found with the sub claims: ' +
payload?.sub,
);
done(null, false);
}
} catch (err) {
done(err, false);
}
},
);

module.exports = openIdJwtLogin;
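The strategy above matches bearer tokens against the provider's JWKS and looks users up by their `openidId`. A minimal registration sketch, grounded in how `configureSocialLogins` wires it up (the protected route is a hypothetical example, not LibreChat's actual routing):

const passport = require('passport');
const { setupOpenId, openIdJwtLogin } = require('~/strategies');

(async () => {
  const openIdConfig = await setupOpenId(); // resolves the discovered Configuration
  if (openIdConfig) {
    passport.use('openidJwt', openIdJwtLogin(openIdConfig));
  }
  // A route could then accept provider-issued access tokens directly, e.g.:
  // app.get('/api/me', passport.authenticate('openidJwt', { session: false }), handler);
})();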
@@ -1,28 +1,108 @@
const { CacheKeys } = require('librechat-data-provider');
const fetch = require('node-fetch');
const passport = require('passport');
const jwtDecode = require('jsonwebtoken/decode');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { Issuer, Strategy: OpenIDStrategy, custom } = require('openid-client');
const client = require('openid-client');
const { Strategy: OpenIDStrategy } = require('openid-client/passport');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { findUser, createUser, updateUser } = require('~/models/userMethods');
const { hashToken } = require('~/server/utils/crypto');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');
const getLogStores = require('~/cache/getLogStores');

let crypto;
try {
crypto = require('node:crypto');
} catch (err) {
logger.error('[openidStrategy] crypto support is disabled!', err);
/**
* @typedef {import('openid-client').ClientMetadata} ClientMetadata
* @typedef {import('openid-client').Configuration} Configuration
**/

/** @typedef {Configuration | null} */
let openidConfig = null;

//overload currenturl function because of express version 4 buggy req.host doesn't include port
//More info https://github.com/panva/openid-client/pull/713

class CustomOpenIDStrategy extends OpenIDStrategy {
currentUrl(req) {
const hostAndProtocol = process.env.DOMAIN_SERVER;
return new URL(`${hostAndProtocol}${req.originalUrl ?? req.url}`);
}
authorizationRequestParams(req, options) {
const params = super.authorizationRequestParams(req, options);
if (options?.state && !params.has('state')) {
params.set('state', options.state);
}
return params;
}
}

/**
* Exchange the access token for a new access token using the on-behalf-of flow if required.
* @param {Configuration} config
* @param {string} accessToken access token to be exchanged if necessary
* @param {string} sub - The subject identifier of the user. usually found as "sub" in the claims of the token
* @param {boolean} fromCache - Indicates whether to use cached tokens.
* @returns {Promise<string>} The new access token if exchanged, otherwise the original access token.
*/
const exchangeAccessTokenIfNeeded = async (config, accessToken, sub, fromCache = false) => {
const tokensCache = getLogStores(CacheKeys.OPENID_EXCHANGED_TOKENS);
const onBehalfFlowRequired = isEnabled(process.env.OPENID_ON_BEHALF_FLOW_FOR_USERINFRO_REQUIRED);
if (onBehalfFlowRequired) {
if (fromCache) {
const cachedToken = await tokensCache.get(sub);
if (cachedToken) {
return cachedToken.access_token;
}
}
const grantResponse = await client.genericGrantRequest(
config,
'urn:ietf:params:oauth:grant-type:jwt-bearer',
{
scope: process.env.OPENID_ON_BEHALF_FLOW_USERINFRO_SCOPE || 'user.read',
assertion: accessToken,
requested_token_use: 'on_behalf_of',
},
);
await tokensCache.set(
sub,
{
access_token: grantResponse.access_token,
},
grantResponse.expires_in * 1000,
);
return grantResponse.access_token;
}
return accessToken;
};

/**
* get user info from openid provider
* @param {Configuration} config
* @param {string} accessToken access token
* @param {string} sub - The subject identifier of the user. usually found as "sub" in the claims of the token
* @returns {Promise<Object|null>}
*/
const getUserInfo = async (config, accessToken, sub) => {
try {
const exchangedAccessToken = await exchangeAccessTokenIfNeeded(config, accessToken, sub);
return await client.fetchUserInfo(config, exchangedAccessToken, sub);
} catch (error) {
logger.warn(`[openidStrategy] getUserInfo: Error fetching user info: ${error}`);
return null;
}
};

/**
* Downloads an image from a URL using an access token.
* @param {string} url
* @param {string} accessToken
* @returns {Promise<Buffer>}
* @param {Configuration} config
* @param {string} accessToken access token
* @param {string} sub - The subject identifier of the user. usually found as "sub" in the claims of the token
* @returns {Promise<Buffer | string>} The image buffer or an empty string if the download fails.
*/
const downloadImage = async (url, accessToken) => {
const downloadImage = async (url, config, accessToken, sub) => {
const exchangedAccessToken = await exchangeAccessTokenIfNeeded(config, accessToken, sub, true);
if (!url) {
return '';
}

@@ -31,7 +111,7 @@ const downloadImage = async (url, accessToken) => {
const options = {
method: 'GET',
headers: {
Authorization: `Bearer ${accessToken}`,
Authorization: `Bearer ${exchangedAccessToken}`,
},
};

@@ -105,63 +185,68 @@ function convertToUsername(input, defaultValue = '') {
return defaultValue;
}

/**
* Sets up the OpenID strategy for authentication.
* This function configures the OpenID client, handles proxy settings,
* and defines the OpenID strategy for Passport.js.
*
* @async
* @function setupOpenId
* @returns {Promise<Configuration | null>} A promise that resolves when the OpenID strategy is set up and returns the openid client config object.
* @throws {Error} If an error occurs during the setup process.
*/
async function setupOpenId() {
try {
if (process.env.PROXY) {
const proxyAgent = new HttpsProxyAgent(process.env.PROXY);
custom.setHttpOptionsDefaults({
agent: proxyAgent,
});
logger.info(`[openidStrategy] proxy agent added: ${process.env.PROXY}`);
}
const issuer = await Issuer.discover(process.env.OPENID_ISSUER);
/* Supported Algorithms, openid-client v5 doesn't set it automatically as discovered from server.
- id_token_signed_response_alg // defaults to 'RS256'
- request_object_signing_alg // defaults to 'RS256'
- userinfo_signed_response_alg // not in v5
- introspection_signed_response_alg // not in v5
- authorization_signed_response_alg // not in v5
*/
/** @type {import('openid-client').ClientMetadata} */
/** @type {ClientMetadata} */
const clientMetadata = {
client_id: process.env.OPENID_CLIENT_ID,
client_secret: process.env.OPENID_CLIENT_SECRET,
redirect_uris: [process.env.DOMAIN_SERVER + process.env.OPENID_CALLBACK_URL],
};
if (isEnabled(process.env.OPENID_SET_FIRST_SUPPORTED_ALGORITHM)) {
clientMetadata.id_token_signed_response_alg =
issuer.id_token_signing_alg_values_supported?.[0] || 'RS256';

/** @type {Configuration} */
openidConfig = await client.discovery(
new URL(process.env.OPENID_ISSUER),
process.env.OPENID_CLIENT_ID,
clientMetadata,
);
if (process.env.PROXY) {
const proxyAgent = new HttpsProxyAgent(process.env.PROXY);
openidConfig[client.customFetch] = (...args) => {
return fetch(args[0], { ...args[1], agent: proxyAgent });
};
logger.info(`[openidStrategy] proxy agent added: ${process.env.PROXY}`);
}
const client = new issuer.Client(clientMetadata);
const requiredRole = process.env.OPENID_REQUIRED_ROLE;
const requiredRoleParameterPath = process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH;
const requiredRoleTokenKind = process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND;
const openidLogin = new OpenIDStrategy(
const usePKCE = isEnabled(process.env.OPENID_USE_PKCE);
const openidLogin = new CustomOpenIDStrategy(
{
client,
params: {
scope: process.env.OPENID_SCOPE,
},
config: openidConfig,
scope: process.env.OPENID_SCOPE,
callbackURL: process.env.DOMAIN_SERVER + process.env.OPENID_CALLBACK_URL,
usePKCE,
},
async (tokenset, userinfo, done) => {
async (tokenset, done) => {
try {
logger.info(`[openidStrategy] verify login openidId: ${userinfo.sub}`);
logger.debug('[openidStrategy] very login tokenset and userinfo', { tokenset, userinfo });

let user = await findUser({ openidId: userinfo.sub });
const claims = tokenset.claims();
let user = await findUser({ openidId: claims.sub });
logger.info(
`[openidStrategy] user ${user ? 'found' : 'not found'} with openidId: ${userinfo.sub}`,
`[openidStrategy] user ${user ? 'found' : 'not found'} with openidId: ${claims.sub}`,
);

if (!user) {
user = await findUser({ email: userinfo.email });
user = await findUser({ email: claims.email });
logger.info(
`[openidStrategy] user ${user ? 'found' : 'not found'} with email: ${
userinfo.email
} for openidId: ${userinfo.sub}`,
claims.email
} for openidId: ${claims.sub}`,
);
}

const userinfo = {
...claims,
...(await getUserInfo(openidConfig, tokenset.access_token, claims.sub)),
};
const fullName = getFullName(userinfo);

if (requiredRole) {

@@ -220,7 +305,7 @@ async function setupOpenId() {
user.name = fullName;
}

if (userinfo.picture && !user.avatar?.includes('manual=true')) {
if (!!userinfo && userinfo.picture && !user.avatar?.includes('manual=true')) {
/** @type {string | undefined} */
const imageUrl = userinfo.picture;

@@ -231,7 +316,12 @@ async function setupOpenId() {
fileName = userinfo.sub + '.png';
}

const imageBuffer = await downloadImage(imageUrl, tokenset.access_token);
const imageBuffer = await downloadImage(
imageUrl,
openidConfig,
tokenset.access_token,
userinfo.sub,
);
if (imageBuffer) {
const { saveBuffer } = getStrategyFunctions(process.env.CDN_PROVIDER);
const imagePath = await saveBuffer({

@@ -257,18 +347,34 @@ async function setupOpenId() {
},
);

done(null, user);
done(null, { ...user, tokenset });
} catch (err) {
logger.error('[openidStrategy] login failed', err);
done(err);
}
},
);

passport.use('openid', openidLogin);
return openidConfig;
} catch (err) {
logger.error('[openidStrategy]', err);
return null;
}
}
/**
* @function getOpenIdConfig
* @description Returns the OpenID client instance.
* @throws {Error} If the OpenID client is not initialized.
* @returns {Configuration}
*/
function getOpenIdConfig() {
if (!openidConfig) {
throw new Error('OpenID client is not initialized. Please call setupOpenId first.');
}
return openidConfig;
}

module.exports = setupOpenId;
module.exports = {
setupOpenId,
getOpenIdConfig,
};
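With the module now caching the discovered configuration, other code can retrieve it after setup instead of re-running discovery. A minimal sketch of the accessor (the `jwks_uri` read relies on the openid-client v6 `Configuration.serverMetadata()` API used above):

const { setupOpenId, getOpenIdConfig } = require('~/strategies');

(async () => {
  await setupOpenId();
  // Throws if setupOpenId() has not completed (or failed and returned null):
  const openIdConfig = getOpenIdConfig();
  const jwksUri = openIdConfig.serverMetadata().jwks_uri;
})();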
@@ -1,16 +1,13 @@
const fetch = require('node-fetch');
const jwtDecode = require('jsonwebtoken/decode');
const { Issuer, Strategy: OpenIDStrategy } = require('openid-client');
const { findUser, createUser, updateUser } = require('~/models/userMethods');
const setupOpenId = require('./openidStrategy');
const { setupOpenId } = require('./openidStrategy');

// --- Mocks ---
jest.mock('node-fetch');
jest.mock('openid-client');
jest.mock('jsonwebtoken/decode');
jest.mock('~/server/services/Files/strategies', () => ({
getStrategyFunctions: jest.fn(() => ({
// You can modify this mock as needed (here returning a dummy function)
saveBuffer: jest.fn().mockResolvedValue('/fake/path/to/avatar.png'),
})),
}));

@@ -23,38 +20,73 @@ jest.mock('~/server/utils/crypto', () => ({
hashToken: jest.fn().mockResolvedValue('hashed-token'),
}));
jest.mock('~/server/utils', () => ({
isEnabled: jest.fn(() => false), // default to false, override per test if needed
isEnabled: jest.fn(() => false),
}));
jest.mock('~/config', () => ({
logger: {
info: jest.fn(),
debug: jest.fn(),
error: jest.fn(),
warn: jest.fn(),
},
}));
jest.mock('~/cache/getLogStores', () =>
jest.fn(() => ({
get: jest.fn(),
set: jest.fn(),
})),
);
jest.mock('librechat-data-provider', () => ({
CacheKeys: {
OPENID_EXCHANGED_TOKENS: 'openid-exchanged-tokens',
},
}));

// Mock Issuer.discover so that setupOpenId gets a fake issuer and client
Issuer.discover = jest.fn().mockResolvedValue({
id_token_signing_alg_values_supported: ['RS256'],
Client: jest.fn().mockImplementation((clientMetadata) => {
return {
metadata: clientMetadata,
};
}),
// Mock the openid-client module and all its dependencies
jest.mock('openid-client', () => {
return {
discovery: jest.fn().mockResolvedValue({
clientId: 'fake_client_id',
clientSecret: 'fake_client_secret',
issuer: 'https://fake-issuer.com',
// Add any other properties needed by the implementation
}),
fetchUserInfo: jest.fn().mockImplementation((config, accessToken, sub) => {
// Only return additional properties, but don't override any claims
return Promise.resolve({
preferred_username: 'preferred_username',
});
}),
customFetch: Symbol('customFetch'),
};
});

// To capture the verify callback from the strategy, we grab it from the mock constructor
let verifyCallback;
OpenIDStrategy.mockImplementation((options, verify) => {
verifyCallback = verify;
return { name: 'openid', options, verify };
jest.mock('openid-client/passport', () => {
let verifyCallback;
const mockStrategy = jest.fn((options, verify) => {
verifyCallback = verify;
return { name: 'openid', options, verify };
});

return {
Strategy: mockStrategy,
__getVerifyCallback: () => verifyCallback,
};
});

// Mock passport
jest.mock('passport', () => ({
use: jest.fn(),
}));

describe('setupOpenId', () => {
// Store a reference to the verify callback once it's set up
let verifyCallback;

// Helper to wrap the verify callback in a promise
const validate = (tokenset, userinfo) =>
const validate = (tokenset) =>
new Promise((resolve, reject) => {
verifyCallback(tokenset, userinfo, (err, user, details) => {
verifyCallback(tokenset, (err, user, details) => {
if (err) {
reject(err);
} else {

@@ -66,17 +98,16 @@ describe('setupOpenId', () => {
const tokenset = {
id_token: 'fake_id_token',
access_token: 'fake_access_token',
};

const baseUserinfo = {
sub: '1234',
email: 'test@example.com',
email_verified: true,
given_name: 'First',
family_name: 'Last',
name: 'My Full',
username: 'flast',
picture: 'https://example.com/avatar.png',
claims: () => ({
sub: '1234',
email: 'test@example.com',
email_verified: true,
given_name: 'First',
family_name: 'Last',
name: 'My Full',
username: 'flast',
picture: 'https://example.com/avatar.png',
}),
};

beforeEach(async () => {

@@ -96,6 +127,7 @@ describe('setupOpenId', () => {
delete process.env.OPENID_USERNAME_CLAIM;
delete process.env.OPENID_NAME_CLAIM;
delete process.env.PROXY;
delete process.env.OPENID_USE_PKCE;

// Default jwtDecode mock returns a token that includes the required role.
jwtDecode.mockReturnValue({

@@ -120,16 +152,17 @@ describe('setupOpenId', () => {
};
fetch.mockResolvedValue(fakeResponse);

// Finally, call the setup function so that passport.use gets called
// Call the setup function and capture the verify callback
await setupOpenId();
verifyCallback = require('openid-client/passport').__getVerifyCallback();
});

it('should create a new user with correct username when username claim exists', async () => {
// Arrange – our userinfo already has username 'flast'
const userinfo = { ...baseUserinfo };
const userinfo = tokenset.claims();

// Act
const { user } = await validate(tokenset, userinfo);
const { user } = await validate(tokenset);

// Assert
expect(user.username).toBe(userinfo.username);

@@ -148,13 +181,13 @@ describe('setupOpenId', () => {

it('should use given_name as username when username claim is missing', async () => {
// Arrange – remove username from userinfo
const userinfo = { ...baseUserinfo };
const userinfo = { ...tokenset.claims() };
delete userinfo.username;
// Expect the username to be the given name (unchanged case)
const expectUsername = userinfo.given_name;

// Act
const { user } = await validate(tokenset, userinfo);
const { user } = await validate({ ...tokenset, claims: () => userinfo });

// Assert
expect(user.username).toBe(expectUsername);

@@ -167,13 +200,13 @@ describe('setupOpenId', () => {

it('should use email as username when username and given_name are missing', async () => {
// Arrange – remove username and given_name
const userinfo = { ...baseUserinfo };
const userinfo = { ...tokenset.claims() };
delete userinfo.username;
delete userinfo.given_name;
const expectUsername = userinfo.email;

// Act
const { user } = await validate(tokenset, userinfo);
const { user } = await validate({ ...tokenset, claims: () => userinfo });

// Assert
expect(user.username).toBe(expectUsername);

@@ -187,10 +220,10 @@ describe('setupOpenId', () => {
it('should override username with OPENID_USERNAME_CLAIM when set', async () => {
// Arrange – set OPENID_USERNAME_CLAIM so that the sub claim is used
process.env.OPENID_USERNAME_CLAIM = 'sub';
const userinfo = { ...baseUserinfo };
const userinfo = tokenset.claims();

// Act
const { user } = await validate(tokenset, userinfo);
const { user } = await validate(tokenset);

// Assert – username should equal the sub (converted as-is)
expect(user.username).toBe(userinfo.sub);

@@ -203,11 +236,11 @@ describe('setupOpenId', () => {

it('should set the full name correctly when given_name and family_name exist', async () => {
// Arrange
const userinfo = { ...baseUserinfo };
const userinfo = tokenset.claims();
const expectedFullName = `${userinfo.given_name} ${userinfo.family_name}`;

// Act
const { user } = await validate(tokenset, userinfo);
const { user } = await validate(tokenset);

// Assert
expect(user.name).toBe(expectedFullName);

@@ -216,10 +249,10 @@ describe('setupOpenId', () => {
it('should override full name with OPENID_NAME_CLAIM when set', async () => {
// Arrange – use the name claim as the full name
process.env.OPENID_NAME_CLAIM = 'name';
const userinfo = { ...baseUserinfo, name: 'Custom Name' };
const userinfo = { ...tokenset.claims(), name: 'Custom Name' };

// Act
const { user } = await validate(tokenset, userinfo);
const { user } = await validate({ ...tokenset, claims: () => userinfo });

// Assert
expect(user.name).toBe('Custom Name');

@@ -230,31 +263,31 @@ describe('setupOpenId', () => {
const existingUser = {
_id: 'existingUserId',
provider: 'local',
email: baseUserinfo.email,
email: tokenset.claims().email,
openidId: '',
username: '',
name: '',
};
findUser.mockImplementation(async (query) => {
if (query.openidId === baseUserinfo.sub || query.email === baseUserinfo.email) {
if (query.openidId === tokenset.claims().sub || query.email === tokenset.claims().email) {
return existingUser;
}
return null;
});

const userinfo = { ...baseUserinfo };
const userinfo = tokenset.claims();

// Act
await validate(tokenset, userinfo);
await validate(tokenset);

// Assert – updateUser should be called and the user object updated
expect(updateUser).toHaveBeenCalledWith(
existingUser._id,
expect.objectContaining({
provider: 'openid',
openidId: baseUserinfo.sub,
username: baseUserinfo.username,
name: `${baseUserinfo.given_name} ${baseUserinfo.family_name}`,
openidId: userinfo.sub,
username: userinfo.username,
name: `${userinfo.given_name} ${userinfo.family_name}`,
}),
);
});

@@ -264,10 +297,10 @@ describe('setupOpenId', () => {
jwtDecode.mockReturnValue({
roles: ['SomeOtherRole'],
});
const userinfo = { ...baseUserinfo };
const userinfo = tokenset.claims();

// Act
const { user, details } = await validate(tokenset, userinfo);
const { user, details } = await validate(tokenset);

// Assert – verify that the strategy rejects login
expect(user).toBe(false);

@@ -276,10 +309,10 @@ describe('setupOpenId', () => {

it('should attempt to download and save the avatar if picture is provided', async () => {
// Arrange – ensure userinfo contains a picture URL
const userinfo = { ...baseUserinfo };
const userinfo = tokenset.claims();

// Act
const { user } = await validate(tokenset, userinfo);
const { user } = await validate(tokenset);

// Assert – verify that download was attempted and the avatar field was set via updateUser
expect(fetch).toHaveBeenCalled();

@@ -289,14 +322,25 @@ describe('setupOpenId', () => {

it('should not attempt to download avatar if picture is not provided', async () => {
// Arrange – remove picture
const userinfo = { ...baseUserinfo };
const userinfo = { ...tokenset.claims() };
delete userinfo.picture;

// Act
await validate(tokenset, userinfo);
await validate({ ...tokenset, claims: () => userinfo });

// Assert – fetch should not be called and avatar should remain undefined or empty
expect(fetch).not.toHaveBeenCalled();
// Depending on your implementation, user.avatar may be undefined or an empty string.
});

it('should default to usePKCE false when OPENID_USE_PKCE is not defined', async () => {
const OpenIDStrategy = require('openid-client/passport').Strategy;

delete process.env.OPENID_USE_PKCE;
await setupOpenId();

const callOptions = OpenIDStrategy.mock.calls[OpenIDStrategy.mock.calls.length - 1][0];
expect(callOptions.usePKCE).toBe(false);
expect(callOptions.params?.code_challenge_method).toBeUndefined();
});
});
api/strategies/samlStrategy.js (new file, 276 lines)

@@ -0,0 +1,276 @@
const fs = require('fs');
const path = require('path');
const fetch = require('node-fetch');
const passport = require('passport');
const { Strategy: SamlStrategy } = require('@node-saml/passport-saml');
const { findUser, createUser, updateUser } = require('~/models/userMethods');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { hashToken } = require('~/server/utils/crypto');
const { logger } = require('~/config');
const paths = require('~/config/paths');

let crypto;
try {
crypto = require('node:crypto');
} catch (err) {
logger.error('[samlStrategy] crypto support is disabled!', err);
}

/**
* Retrieves the certificate content from the given value.
*
* This function determines whether the provided value is a certificate string (RFC7468 format or
* base64-encoded without a header) or a valid file path. If the value matches one of these formats,
* the certificate content is returned. Otherwise, an error is thrown.
*
* @see https://github.com/node-saml/node-saml/tree/master?tab=readme-ov-file#configuration-option-idpcert
* @param {string} value - The certificate string or file path.
* @returns {string} The certificate content if valid.
* @throws {Error} If the value is not a valid certificate string or file path.
*/
function getCertificateContent(value) {
if (typeof value !== 'string') {
throw new Error('Invalid input: SAML_CERT must be a string.');
}

// Check if it's an RFC7468 formatted PEM certificate
const pemRegex = new RegExp(
'-----BEGIN (CERTIFICATE|PUBLIC KEY)-----\n' + // header
'([A-Za-z0-9+/=]{64}\n)+' + // base64 content (64 characters per line)
'[A-Za-z0-9+/=]{1,64}\n' + // base64 content (last line)
'-----END (CERTIFICATE|PUBLIC KEY)-----', // footer
);
if (pemRegex.test(value)) {
logger.info('[samlStrategy] Detected RFC7468-formatted certificate string.');
return value;
}

// Check if it's a Base64-encoded certificate (no header)
if (/^[A-Za-z0-9+/=]+$/.test(value) && value.length % 4 === 0) {
logger.info('[samlStrategy] Detected base64-encoded certificate string (no header).');
return value;
}

// Check if file exists and is readable
const certPath = path.normalize(path.isAbsolute(value) ? value : path.join(paths.root, value));
if (fs.existsSync(certPath) && fs.statSync(certPath).isFile()) {
try {
logger.info(`[samlStrategy] Loading certificate from file: ${certPath}`);
return fs.readFileSync(certPath, 'utf8').trim();
} catch (error) {
throw new Error(`Error reading certificate file: ${error.message}`);
}
}

throw new Error('Invalid cert: SAML_CERT must be a valid file path or certificate string.');
}

/**
* Retrieves a SAML claim from a profile object based on environment configuration.
* @param {object} profile - Saml profile
* @param {string} envVar - Environment variable name (SAML_*)
* @param {string} defaultKey - Default key to use if the environment variable is not set
* @returns {string}
*/
function getSamlClaim(profile, envVar, defaultKey) {
const claimKey = process.env[envVar];

// Avoids accessing `profile[""]` when the environment variable is empty string.
if (claimKey) {
return profile[claimKey] ?? profile[defaultKey];
}
return profile[defaultKey];
}

function getEmail(profile) {
return getSamlClaim(profile, 'SAML_EMAIL_CLAIM', 'email');
}

function getUserName(profile) {
return getSamlClaim(profile, 'SAML_USERNAME_CLAIM', 'username');
}

function getGivenName(profile) {
return getSamlClaim(profile, 'SAML_GIVEN_NAME_CLAIM', 'given_name');
}

function getFamilyName(profile) {
return getSamlClaim(profile, 'SAML_FAMILY_NAME_CLAIM', 'family_name');
}

function getPicture(profile) {
return getSamlClaim(profile, 'SAML_PICTURE_CLAIM', 'picture');
}

/**
* Downloads an image from a URL using an access token.
* @param {string} url
* @returns {Promise<Buffer>}
*/
const downloadImage = async (url) => {
try {
const response = await fetch(url);
if (response.ok) {
return await response.buffer();
} else {
throw new Error(`${response.statusText} (HTTP ${response.status})`);
}
} catch (error) {
logger.error(`[samlStrategy] Error downloading image at URL "${url}": ${error}`);
return null;
}
};

/**
* Determines the full name of a user based on SAML profile and environment configuration.
*
* @param {Object} profile - The user profile object from SAML Connect
* @returns {string} The determined full name of the user
*/
function getFullName(profile) {
if (process.env.SAML_NAME_CLAIM) {
logger.info(
`[samlStrategy] Using SAML_NAME_CLAIM: ${process.env.SAML_NAME_CLAIM}, profile: ${profile[process.env.SAML_NAME_CLAIM]}`,
);
return profile[process.env.SAML_NAME_CLAIM];
}

const givenName = getGivenName(profile);
const familyName = getFamilyName(profile);

if (givenName && familyName) {
return `${givenName} ${familyName}`;
}

if (givenName) {
return givenName;
}
if (familyName) {
return familyName;
}

return getUserName(profile) || getEmail(profile);
}

/**
* Converts an input into a string suitable for a username.
* If the input is a string, it will be returned as is.
* If the input is an array, elements will be joined with underscores.
* In case of undefined or other falsy values, a default value will be returned.
*
* @param {string | string[] | undefined} input - The input value to be converted into a username.
* @param {string} [defaultValue=''] - The default value to return if the input is falsy.
* @returns {string} The processed input as a string suitable for a username.
*/
function convertToUsername(input, defaultValue = '') {
if (typeof input === 'string') {
return input;
} else if (Array.isArray(input)) {
return input.join('_');
}

return defaultValue;
}

async function setupSaml() {
try {
const samlConfig = {
entryPoint: process.env.SAML_ENTRY_POINT,
issuer: process.env.SAML_ISSUER,
callbackUrl: process.env.SAML_CALLBACK_URL,
idpCert: getCertificateContent(process.env.SAML_CERT),
wantAssertionsSigned: process.env.SAML_USE_AUTHN_RESPONSE_SIGNED === 'true' ? false : true,
wantAuthnResponseSigned: process.env.SAML_USE_AUTHN_RESPONSE_SIGNED === 'true' ? true : false,
};

passport.use(
'saml',
new SamlStrategy(samlConfig, async (profile, done) => {
try {
logger.info(`[samlStrategy] SAML authentication received for NameID: ${profile.nameID}`);
logger.debug('[samlStrategy] SAML profile:', profile);

let user = await findUser({ samlId: profile.nameID });
logger.info(
`[samlStrategy] User ${user ? 'found' : 'not found'} with SAML ID: ${profile.nameID}`,
);

if (!user) {
const email = getEmail(profile) || '';
user = await findUser({ email });
logger.info(
`[samlStrategy] User ${user ? 'found' : 'not found'} with email: ${profile.email}`,
);
}

const fullName = getFullName(profile);

const username = convertToUsername(
getUserName(profile) || getGivenName(profile) || getEmail(profile),
);

if (!user) {
user = {
provider: 'saml',
samlId: profile.nameID,
username,
email: getEmail(profile) || '',
emailVerified: true,
name: fullName,
};
user = await createUser(user, true, true);
} else {
user.provider = 'saml';
user.samlId = profile.nameID;
user.username = username;
user.name = fullName;
}

const picture = getPicture(profile);
if (picture && !user.avatar?.includes('manual=true')) {
const imageBuffer = await downloadImage(profile.picture);
if (imageBuffer) {
let fileName;
if (crypto) {
fileName = (await hashToken(profile.nameID)) + '.png';
} else {
fileName = profile.nameID + '.png';
}

const { saveBuffer } = getStrategyFunctions(process.env.CDN_PROVIDER);
const imagePath = await saveBuffer({
fileName,
userId: user._id.toString(),
buffer: imageBuffer,
});
user.avatar = imagePath ?? '';
}
}

user = await updateUser(user._id, user);

logger.info(
`[samlStrategy] Login success SAML ID: ${user.samlId} | email: ${user.email} | username: ${user.username}`,
{
user: {
samlId: user.samlId,
username: user.username,
email: user.email,
name: user.name,
},
},
);

done(null, user);
} catch (err) {
logger.error('[samlStrategy] Login failed', err);
done(err);
}
}),
);
} catch (err) {
logger.error('[samlStrategy]', err);
}
}

module.exports = { setupSaml, getCertificateContent };
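`getCertificateContent` accepts the IdP certificate in any of the three forms documented above. A minimal usage sketch (the require path is an assumption; SAML_CERT values are hypothetical):

const { getCertificateContent } = require('~/strategies/samlStrategy'); // path assumed

// Any of these SAML_CERT values resolve to certificate content:
//   a full PEM string ("-----BEGIN CERTIFICATE-----\n...")
//   a bare base64 body with no header
//   a file path (relative paths resolve against the project root)
const idpCert = getCertificateContent(process.env.SAML_CERT); // throws on anything else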
428
api/strategies/samlStrategy.spec.js
Normal file
428
api/strategies/samlStrategy.spec.js
Normal file
|
|
@ -0,0 +1,428 @@
|
|||
const fs = require('fs');
const path = require('path');
const fetch = require('node-fetch');
const { Strategy: SamlStrategy } = require('@node-saml/passport-saml');
const { findUser, createUser, updateUser } = require('~/models/userMethods');
const { setupSaml, getCertificateContent } = require('./samlStrategy');

// --- Mocks ---
jest.mock('fs');
jest.mock('path');
jest.mock('node-fetch');
jest.mock('@node-saml/passport-saml');
jest.mock('~/models/userMethods', () => ({
  findUser: jest.fn(),
  createUser: jest.fn(),
  updateUser: jest.fn(),
}));
jest.mock('~/server/services/Files/strategies', () => ({
  getStrategyFunctions: jest.fn(() => ({
    saveBuffer: jest.fn().mockResolvedValue('/fake/path/to/avatar.png'),
  })),
}));
jest.mock('~/server/utils/crypto', () => ({
  hashToken: jest.fn().mockResolvedValue('hashed-token'),
}));
jest.mock('~/server/utils', () => ({
  isEnabled: jest.fn(() => false),
}));
jest.mock('~/config', () => ({
  logger: {
    info: jest.fn(),
    debug: jest.fn(),
    error: jest.fn(),
  },
}));

// To capture the verify callback from the strategy, we grab it from the mock constructor
let verifyCallback;
SamlStrategy.mockImplementation((options, verify) => {
  verifyCallback = verify;
  return { name: 'saml', options, verify };
});

describe('getCertificateContent', () => {
  const certWithHeader = `-----BEGIN CERTIFICATE-----
MIIDazCCAlOgAwIBAgIUKhXaFJGJJPx466rlwYORIsqCq7MwDQYJKoZIhvcNAQEL
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yNTAzMDQwODUxNTJaFw0yNjAz
MDQwODUxNTJaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB
AQUAA4IBDwAwggEKAoIBAQCWP09NZg0xaRiLpNygCVgV3M+4RFW2S0c5X/fg/uFT
O5MfaVYzG5GxzhXzWRB8RtNPsxX/nlbPsoUroeHbz+SABkOsNEv6JuKRH4VXRH34
VzjazVkPAwj+N4WqsC/Wo4EGGpKIGeGi8Zed4yvMqoTyE3mrS19fY0nMHT62wUwS
GMm2pAQdAQePZ9WY7A5XOA1IoxW2Zh2Oxaf1p59epBkZDhoxSMu8GoSkvK27Km4A
4UXftzdg/wHNPrNirmcYouioHdmrOtYxPjrhUBQ74AmE1/QK45B6wEgirKH1A1AW
6C+ApLwpBMvy9+8Gbyvc8G18W3CjdEVKmAeWb9JUedSXAgMBAAGjUzBRMB0GA1Ud
DgQWBBRxpaqBx8VDLLc8IkHATujj8IOs6jAfBgNVHSMEGDAWgBRxpaqBx8VDLLc8
IkHATujj8IOs6jAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBc
Puk6i+yowwGccB3LhfxZ+Fz6s6/Lfx6bP/Hy4NYOxmx2/awGBgyfp1tmotjaS9Cf
FWd67LuEru4TYtz12RNMDBF5ypcEfibvb3I8O6igOSQX/Jl5D2pMChesZxhmCift
Qp09T41MA8PmHf1G9oMG0A3ZnjKDG5ebaJNRFImJhMHsgh/TP7V3uZy7YHTgopKX
Hv63V3Uo3Oihav29Q7urwmf7Ly7X7J2WE86/w3vRHi5dhaWWqEqxmnAXl+H+sG4V
meeVRI332bg1Nuy8KnnX8v3ZeJzMBkAhzvSr6Ri96R0/Un/oEFwVC5jDTq8sXVn6
u7wlOSk+oFzDIO/UILIA
-----END CERTIFICATE-----`;

  const certWithoutHeader = certWithHeader
    .replace(/-----BEGIN CERTIFICATE-----/g, '')
    .replace(/-----END CERTIFICATE-----/g, '')
    .replace(/\s+/g, '');

  it('should throw an error if SAML_CERT is not set', () => {
    delete process.env.SAML_CERT;
    expect(() => getCertificateContent(process.env.SAML_CERT)).toThrow(
      'Invalid input: SAML_CERT must be a string.',
    );
  });

  it('should throw an error if SAML_CERT is empty', () => {
    process.env.SAML_CERT = '';
    expect(() => getCertificateContent(process.env.SAML_CERT)).toThrow(
      'Invalid cert: SAML_CERT must be a valid file path or certificate string.',
    );
  });

  it('should load cert from an environment variable if it is a single-line string (with header)', () => {
    process.env.SAML_CERT = certWithHeader;

    const actual = getCertificateContent(process.env.SAML_CERT);
    expect(actual).toBe(certWithHeader);
  });

  it('should load cert from an environment variable if it is a single-line string (without header)', () => {
    process.env.SAML_CERT = certWithoutHeader;

    const actual = getCertificateContent(process.env.SAML_CERT);
    expect(actual).toBe(certWithoutHeader);
  });

  it('should throw an error if SAML_CERT is a single-line string (with header, no newline characters)', () => {
    process.env.SAML_CERT = certWithHeader.replace(/\n/g, '');
    expect(() => getCertificateContent(process.env.SAML_CERT)).toThrow(
      'Invalid cert: SAML_CERT must be a valid file path or certificate string.',
    );
  });

  it('should load cert from a relative file path if SAML_CERT is valid', () => {
    process.env.SAML_CERT = 'test.pem';
    const resolvedPath = '/absolute/path/to/test.pem';

    path.isAbsolute.mockReturnValue(false);
    path.join.mockReturnValue(resolvedPath);
    path.normalize.mockReturnValue(resolvedPath);

    fs.existsSync.mockReturnValue(true);
    fs.statSync.mockReturnValue({ isFile: () => true });
    fs.readFileSync.mockReturnValue(certWithHeader);

    const actual = getCertificateContent(process.env.SAML_CERT);
    expect(actual).toBe(certWithHeader);
  });

  it('should load cert from an absolute file path if SAML_CERT is valid', () => {
    process.env.SAML_CERT = '/absolute/path/to/test.pem';

    path.isAbsolute.mockReturnValue(true);
    path.normalize.mockReturnValue(process.env.SAML_CERT);

    fs.existsSync.mockReturnValue(true);
    fs.statSync.mockReturnValue({ isFile: () => true });
    fs.readFileSync.mockReturnValue(certWithHeader);

    const actual = getCertificateContent(process.env.SAML_CERT);
    expect(actual).toBe(certWithHeader);
  });

  it('should throw an error if the file does not exist', () => {
    process.env.SAML_CERT = 'missing.pem';
    const resolvedPath = '/absolute/path/to/missing.pem';

    path.isAbsolute.mockReturnValue(false);
    path.join.mockReturnValue(resolvedPath);
    path.normalize.mockReturnValue(resolvedPath);

    fs.existsSync.mockReturnValue(false);

    expect(() => getCertificateContent(process.env.SAML_CERT)).toThrow(
      'Invalid cert: SAML_CERT must be a valid file path or certificate string.',
    );
  });

  it('should throw an error if the file is not readable', () => {
    process.env.SAML_CERT = 'unreadable.pem';
    const resolvedPath = '/absolute/path/to/unreadable.pem';

    path.isAbsolute.mockReturnValue(false);
    path.join.mockReturnValue(resolvedPath);
    path.normalize.mockReturnValue(resolvedPath);

    fs.existsSync.mockReturnValue(true);
    fs.statSync.mockReturnValue({ isFile: () => true });
    fs.readFileSync.mockImplementation(() => {
      throw new Error('Permission denied');
    });

    expect(() => getCertificateContent(process.env.SAML_CERT)).toThrow(
      'Error reading certificate file: Permission denied',
    );
  });
});

describe('setupSaml', () => {
  // Helper to wrap the verify callback in a promise
  const validate = (profile) =>
    new Promise((resolve, reject) => {
      verifyCallback(profile, (err, user, details) => {
        if (err) {
          reject(err);
        } else {
          resolve({ user, details });
        }
      });
    });

  const baseProfile = {
    nameID: 'saml-1234',
    email: 'test@example.com',
    given_name: 'First',
    family_name: 'Last',
    name: 'My Full Name',
    username: 'flast',
    picture: 'https://example.com/avatar.png',
    custom_name: 'custom',
  };

  beforeEach(async () => {
    jest.clearAllMocks();

    const cert = `
-----BEGIN CERTIFICATE-----
MIIDazCCAlOgAwIBAgIUKhXaFJGJJPx466rlwYORIsqCq7MwDQYJKoZIhvcNAQEL
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yNTAzMDQwODUxNTJaFw0yNjAz
MDQwODUxNTJaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB
AQUAA4IBDwAwggEKAoIBAQCWP09NZg0xaRiLpNygCVgV3M+4RFW2S0c5X/fg/uFT
O5MfaVYzG5GxzhXzWRB8RtNPsxX/nlbPsoUroeHbz+SABkOsNEv6JuKRH4VXRH34
VzjazVkPAwj+N4WqsC/Wo4EGGpKIGeGi8Zed4yvMqoTyE3mrS19fY0nMHT62wUwS
GMm2pAQdAQePZ9WY7A5XOA1IoxW2Zh2Oxaf1p59epBkZDhoxSMu8GoSkvK27Km4A
4UXftzdg/wHNPrNirmcYouioHdmrOtYxPjrhUBQ74AmE1/QK45B6wEgirKH1A1AW
6C+ApLwpBMvy9+8Gbyvc8G18W3CjdEVKmAeWb9JUedSXAgMBAAGjUzBRMB0GA1Ud
DgQWBBRxpaqBx8VDLLc8IkHATujj8IOs6jAfBgNVHSMEGDAWgBRxpaqBx8VDLLc8
IkHATujj8IOs6jAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBc
Puk6i+yowwGccB3LhfxZ+Fz6s6/Lfx6bP/Hy4NYOxmx2/awGBgyfp1tmotjaS9Cf
FWd67LuEru4TYtz12RNMDBF5ypcEfibvb3I8O6igOSQX/Jl5D2pMChesZxhmCift
Qp09T41MA8PmHf1G9oMG0A3ZnjKDG5ebaJNRFImJhMHsgh/TP7V3uZy7YHTgopKX
Hv63V3Uo3Oihav29Q7urwmf7Ly7X7J2WE86/w3vRHi5dhaWWqEqxmnAXl+H+sG4V
meeVRI332bg1Nuy8KnnX8v3ZeJzMBkAhzvSr6Ri96R0/Un/oEFwVC5jDTq8sXVn6
u7wlOSk+oFzDIO/UILIA
-----END CERTIFICATE-----
`;

    // Reset environment variables
    process.env.SAML_ENTRY_POINT = 'https://example.com/saml';
    process.env.SAML_ISSUER = 'saml-issuer';
    process.env.SAML_CERT = cert;
    process.env.SAML_CALLBACK_URL = '/oauth/saml/callback';
    delete process.env.SAML_EMAIL_CLAIM;
    delete process.env.SAML_USERNAME_CLAIM;
    delete process.env.SAML_GIVEN_NAME_CLAIM;
    delete process.env.SAML_FAMILY_NAME_CLAIM;
    delete process.env.SAML_PICTURE_CLAIM;
    delete process.env.SAML_NAME_CLAIM;

    findUser.mockResolvedValue(null);
    createUser.mockImplementation(async (userData) => ({
      _id: 'newUserId',
      ...userData,
    }));
    updateUser.mockImplementation(async (id, userData) => ({
      _id: id,
      ...userData,
    }));

    // Simulate image download
    const fakeBuffer = Buffer.from('fake image');
    fetch.mockResolvedValue({
      ok: true,
      buffer: jest.fn().mockResolvedValue(fakeBuffer),
    });

    await setupSaml();
  });

  it('should create a new user with correct username when username claim exists', async () => {
    const profile = { ...baseProfile };
    const { user } = await validate(profile);

    expect(user.username).toBe(profile.username);
    expect(createUser).toHaveBeenCalledWith(
      expect.objectContaining({
        provider: 'saml',
        samlId: profile.nameID,
        username: profile.username,
        email: profile.email,
        name: `${profile.given_name} ${profile.family_name}`,
      }),
      true,
      true,
    );
  });

  it('should use given_name as username when username claim is missing', async () => {
    const profile = { ...baseProfile };
    delete profile.username;
    const expectUsername = profile.given_name;

    const { user } = await validate(profile);

    expect(user.username).toBe(expectUsername);
    expect(createUser).toHaveBeenCalledWith(
      expect.objectContaining({ username: expectUsername }),
      true,
      true,
    );
  });

  it('should use email as username when username and given_name are missing', async () => {
    const profile = { ...baseProfile };
    delete profile.username;
    delete profile.given_name;
    const expectUsername = profile.email;

    const { user } = await validate(profile);

    expect(user.username).toBe(expectUsername);
    expect(createUser).toHaveBeenCalledWith(
      expect.objectContaining({ username: expectUsername }),
      true,
      true,
    );
  });

  it('should override username with SAML_USERNAME_CLAIM when set', async () => {
    process.env.SAML_USERNAME_CLAIM = 'nameID';
    const profile = { ...baseProfile };

    const { user } = await validate(profile);

    expect(user.username).toBe(profile.nameID);
    expect(createUser).toHaveBeenCalledWith(
      expect.objectContaining({ username: profile.nameID }),
      true,
      true,
    );
  });

  it('should set the full name correctly when given_name and family_name exist', async () => {
    const profile = { ...baseProfile };
    const expectedFullName = `${profile.given_name} ${profile.family_name}`;

    const { user } = await validate(profile);

    expect(user.name).toBe(expectedFullName);
  });

  it('should set the full name correctly when given_name exists', async () => {
    const profile = { ...baseProfile };
    delete profile.family_name;
    const expectedFullName = profile.given_name;

    const { user } = await validate(profile);

    expect(user.name).toBe(expectedFullName);
  });

  it('should set the full name correctly when family_name exists', async () => {
    const profile = { ...baseProfile };
    delete profile.given_name;
    const expectedFullName = profile.family_name;

    const { user } = await validate(profile);

    expect(user.name).toBe(expectedFullName);
  });

  it('should set the full name correctly when username exists', async () => {
    const profile = { ...baseProfile };
    delete profile.family_name;
    delete profile.given_name;
    const expectedFullName = profile.username;

    const { user } = await validate(profile);

    expect(user.name).toBe(expectedFullName);
  });

  it('should set the full name correctly when only email exists', async () => {
    const profile = { ...baseProfile };
    delete profile.family_name;
    delete profile.given_name;
    delete profile.username;
    const expectedFullName = profile.email;

    const { user } = await validate(profile);

    expect(user.name).toBe(expectedFullName);
  });

  it('should set the full name correctly with SAML_NAME_CLAIM when set', async () => {
    process.env.SAML_NAME_CLAIM = 'custom_name';
    const profile = { ...baseProfile };
    const expectedFullName = profile.custom_name;

    const { user } = await validate(profile);

    expect(user.name).toBe(expectedFullName);
  });

  it('should update an existing user on login', async () => {
    const existingUser = {
      _id: 'existingUserId',
      provider: 'local',
      email: baseProfile.email,
      samlId: '',
      username: '',
      name: '',
    };

    findUser.mockImplementation(async (query) => {
      if (query.samlId === baseProfile.nameID || query.email === baseProfile.email) {
        return existingUser;
      }
      return null;
    });

    const profile = { ...baseProfile };
    await validate(profile);

    expect(updateUser).toHaveBeenCalledWith(
      existingUser._id,
      expect.objectContaining({
        provider: 'saml',
        samlId: baseProfile.nameID,
        username: baseProfile.username,
        name: `${baseProfile.given_name} ${baseProfile.family_name}`,
      }),
    );
  });

  it('should attempt to download and save the avatar if picture is provided', async () => {
    const profile = { ...baseProfile };

    const { user } = await validate(profile);

    expect(fetch).toHaveBeenCalled();
    expect(user.avatar).toBe('/fake/path/to/avatar.png');
  });

  it('should not attempt to download avatar if picture is not provided', async () => {
    const profile = { ...baseProfile };
    delete profile.picture;

    await validate(profile);

    expect(fetch).not.toHaveBeenCalled();
  });
});
@@ -7,7 +7,8 @@ const socialLogin =
  (provider, getProfileDetails) => async (accessToken, refreshToken, idToken, profile, cb) => {
    try {
      const { email, id, avatarUrl, username, name, emailVerified } = getProfileDetails({
        idToken,
        profile,
      });

      const oldUser = await findUser({ email: email.trim() });
@@ -8,6 +8,7 @@ jest.mock('winston', () => {
  mockFormatFunction.printf = jest.fn();
  mockFormatFunction.errors = jest.fn();
  mockFormatFunction.splat = jest.fn();
  mockFormatFunction.json = jest.fn();
  return {
    format: mockFormatFunction,
    createLogger: jest.fn().mockReturnValue({

@@ -19,6 +20,7 @@ jest.mock('winston', () => {
    transports: {
      Console: jest.fn(),
      DailyRotateFile: jest.fn(),
      File: jest.fn(),
    },
    addColors: jest.fn(),
  };
api/test/__mocks__/openid-client-passport.js (new file, 6 lines)
@@ -0,0 +1,6 @@
// api/test/__mocks__/openid-client-passport.js
const Strategy = jest.fn().mockImplementation((options, verify) => {
  return { name: 'mocked-openid-passport-strategy', options, verify };
});

module.exports = { Strategy };
api/test/__mocks__/openid-client.js (new file, 67 lines)
@@ -0,0 +1,67 @@
// api/test/__mocks__/openid-client.js
module.exports = {
  Issuer: {
    discover: jest.fn().mockResolvedValue({
      Client: jest.fn().mockImplementation(() => ({
        authorizationUrl: jest.fn().mockReturnValue('mock_auth_url'),
        callback: jest.fn().mockResolvedValue({
          access_token: 'mock_access_token',
          id_token: 'mock_id_token',
          claims: () => ({
            sub: 'mock_sub',
            email: 'mock@example.com',
          }),
        }),
        userinfo: jest.fn().mockResolvedValue({
          sub: 'mock_sub',
          email: 'mock@example.com',
        }),
      })),
    }),
  },
  Strategy: jest.fn().mockImplementation((options, verify) => {
    // Store verify to call it if needed, or just mock the strategy behavior
    return { name: 'openid-mock-strategy' };
  }),
  custom: {
    setHttpOptionsDefaults: jest.fn(),
  },
  // Add any other exports from openid-client that are used directly.
  // Based on openidStrategy.js, the code uses:
  //   const client = require('openid-client'); -> client.discovery, client.fetchUserInfo, client.genericGrantRequest
  //   const { Strategy: OpenIDStrategy } = require('openid-client/passport');
  // So the mock needs to cover these; the mock in openidStrategy.spec.js is a good reference.

  // Simpler mock based on the spec file:
  discovery: jest.fn().mockResolvedValue({
    clientId: 'fake_client_id',
    clientSecret: 'fake_client_secret',
    issuer: 'https://fake-issuer.com',
    Client: jest.fn().mockImplementation(() => ({
      authorizationUrl: jest.fn().mockReturnValue('mock_auth_url'),
      callback: jest.fn().mockResolvedValue({
        access_token: 'mock_access_token',
        id_token: 'mock_id_token',
        claims: () => ({
          sub: 'mock_sub',
          email: 'mock@example.com',
        }),
      }),
      userinfo: jest.fn().mockResolvedValue({
        sub: 'mock_sub',
        email: 'mock@example.com',
      }),
      grant: jest.fn().mockResolvedValue({ access_token: 'mock_grant_token' }), // For genericGrantRequest
    })),
  }),
  fetchUserInfo: jest.fn().mockResolvedValue({
    preferred_username: 'preferred_username',
  }),
  genericGrantRequest: jest
    .fn()
    .mockResolvedValue({ access_token: 'mock_grant_access_token', expires_in: 3600 }),
  customFetch: Symbol('customFetch'),
};
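Both mock files above live under api/test/__mocks__, so they only take effect once Jest resolves the real packages to them. A hedged sketch of what that mapping could look like (the repo's actual Jest config keys and paths may differ):

// Hypothetical jest.config.js excerpt; paths and keys are assumptions.
module.exports = {
  moduleNameMapper: {
    '^openid-client$': '<rootDir>/test/__mocks__/openid-client.js',
    '^openid-client/passport$': '<rootDir>/test/__mocks__/openid-client-passport.js',
  },
};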
@@ -6,3 +6,7 @@ process.env.BAN_VIOLATIONS = 'true';
process.env.BAN_DURATION = '7200000';
process.env.BAN_INTERVAL = '20';
process.env.CI = 'true';
process.env.JWT_SECRET = 'test';
process.env.JWT_REFRESH_SECRET = 'test';
process.env.CREDS_KEY = 'test';
process.env.CREDS_IV = 'test';
@@ -55,6 +55,12 @@
 * @memberof typedefs
 */

/**
 * @exports MessageContentComplex
 * @typedef {import('@librechat/agents').MessageContentComplex} MessageContentComplex
 * @memberof typedefs
 */

/**
 * @exports EventHandler
 * @typedef {import('@librechat/agents').EventHandler} EventHandler

@@ -186,6 +192,8 @@
 * agent_index: number;
 * last_agent_index: number;
 * hide_sequential_outputs: boolean;
 * version?: 'v1' | 'v2';
 * streamMode?: string
 * }> & {
 * toolCall?: LangChainToolCall & { stepId?: string };
 * }} GraphRunnableConfig

@@ -473,6 +481,25 @@
 * @typedef {import('librechat-data-provider').Agents.MessageContentImageUrl} MessageContentImageUrl
 * @memberof typedefs
 */
/** Web Search */

/**
 * @exports SearchResult
 * @typedef {import('@librechat/agents').SearchResult} SearchResult
 * @memberof typedefs
 */

/**
 * @exports SearchResultData
 * @typedef {import('@librechat/agents').SearchResultData} SearchResultData
 * @memberof typedefs
 */

/**
 * @exports ValidSource
 * @typedef {import('librechat-data-provider').ValidSource} ValidSource
 * @memberof typedefs
 */

/** Prompts */
/**

@@ -848,6 +875,12 @@
 * @memberof typedefs
 */

/**
 * @exports IPluginAuth
 * @typedef {import('@librechat/data-schemas').IPluginAuth} IPluginAuth
 * @memberof typedefs
 */

/**
 * @exports ObjectId
 * @typedef {import('mongoose').Types.ObjectId} ObjectId

@@ -990,6 +1023,18 @@
 * @memberof typedefs
 */

/**
 * @exports TEphemeralAgent
 * @typedef {import('librechat-data-provider').TEphemeralAgent} TEphemeralAgent
 * @memberof typedefs
 */

/**
 * @exports TWebSearchKeys
 * @typedef {import('librechat-data-provider').TWebSearchKeys} TWebSearchKeys
 * @memberof typedefs
 */

/**
 * @exports AgentToolResources
 * @typedef {import('librechat-data-provider').AgentToolResources} AgentToolResources
@@ -29,7 +29,7 @@ const logAxiosError = ({ message, error }) => {
      requestInfo: { method, url },
      stack,
    });
  } else if (error?.message?.includes("Cannot read properties of undefined (reading 'status')")) {
    logMessage = `${message} It appears the request timed out or was unsuccessful: ${error.message}`;
    logger.error(logMessage, { stack });
  } else {
@@ -105,6 +105,9 @@ const anthropicModels = {
  'claude-3.7-sonnet': 200000,
  'claude-3-5-sonnet-latest': 200000,
  'claude-3.5-sonnet-latest': 200000,
  'claude-sonnet-4': 200000,
  'claude-opus-4': 200000,
  'claude-4': 200000,
};

const deepseekModels = {
@@ -246,6 +249,8 @@ const anthropicMaxOutputs = {
  'claude-3-haiku': 4096,
  'claude-3-sonnet': 4096,
  'claude-3-opus': 4096,
  'claude-opus-4': 32000,
  'claude-sonnet-4': 64000,
  'claude-3.5-sonnet': 8192,
  'claude-3-5-sonnet': 8192,
  'claude-3.7-sonnet': 128000,
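The token maps above are keyed by model family rather than by full release name, and the lookup helpers resolve dated releases back to a family key, which is exactly what the tests below exercise. A rough sketch of that matching idea, where findMatching is a hypothetical helper and not the repo's implementation:

// Hypothetical sketch of substring matching over the token maps.
const contextWindows = {
  'claude-sonnet-4': 200000,
  'claude-opus-4': 200000,
};

function findMatching(modelName, map) {
  if (map[modelName] != null) {
    return map[modelName]; // exact hit
  }
  // Fall back to the longest key contained in the model name, so
  // 'anthropic/claude-sonnet-4-20250514' still resolves to 'claude-sonnet-4'.
  const keys = Object.keys(map).sort((a, b) => b.length - a.length);
  const hit = keys.find((key) => modelName.includes(key));
  return hit ? map[hit] : undefined;
}

console.log(findMatching('anthropic/claude-sonnet-4-20250514', contextWindows)); // 200000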
@@ -649,3 +649,58 @@ describe('Grok Model Tests - Tokens', () => {
    });
  });
});

describe('Claude Model Tests', () => {
  it('should return correct context length for Claude 4 models', () => {
    expect(getModelMaxTokens('claude-sonnet-4')).toBe(200000);
    expect(getModelMaxTokens('claude-opus-4')).toBe(200000);
  });

  it('should handle Claude 4 model name variations with different prefixes and suffixes', () => {
    const modelVariations = [
      'claude-sonnet-4',
      'claude-sonnet-4-20240229',
      'claude-sonnet-4-latest',
      'anthropic/claude-sonnet-4',
      'claude-sonnet-4/anthropic',
      'claude-sonnet-4-preview',
      'claude-sonnet-4-20240229-preview',
      'claude-opus-4',
      'claude-opus-4-20240229',
      'claude-opus-4-latest',
      'anthropic/claude-opus-4',
      'claude-opus-4/anthropic',
      'claude-opus-4-preview',
      'claude-opus-4-20240229-preview',
    ];

    modelVariations.forEach((model) => {
      expect(getModelMaxTokens(model)).toBe(200000);
    });
  });

  it('should match model names correctly for Claude 4 models', () => {
    const modelVariations = [
      'claude-sonnet-4',
      'claude-sonnet-4-20240229',
      'claude-sonnet-4-latest',
      'anthropic/claude-sonnet-4',
      'claude-sonnet-4/anthropic',
      'claude-sonnet-4-preview',
      'claude-sonnet-4-20240229-preview',
      'claude-opus-4',
      'claude-opus-4-20240229',
      'claude-opus-4-latest',
      'anthropic/claude-opus-4',
      'claude-opus-4/anthropic',
      'claude-opus-4-preview',
      'claude-opus-4-20240229-preview',
    ];

    modelVariations.forEach((model) => {
      const isSonnet = model.includes('sonnet');
      const expectedModel = isSonnet ? 'claude-sonnet-4' : 'claude-opus-4';
      expect(matchModelName(model, EModelEndpoint.anthropic)).toBe(expectedModel);
    });
  });
});
@@ -1,4 +0,0 @@
apiVersion: v2
name: librechat
type: application
version: 1.0.0
@@ -1,10 +0,0 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: {{ include "librechat.fullname" . }}-env
  labels:
    {{- include "librechat.labels" . | nindent 4 }}
data:
  {{- range $key, $val := .Values.config.env }}
  {{ $key }}: {{ $val | quote }}
  {{- end }}
@@ -1,81 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "librechat.fullname" . }}
  labels:
    {{- include "librechat.labels" . | nindent 4 }}
spec:
  {{- if not .Values.autoscaling.enabled }}
  replicas: {{ .Values.replicaCount }}
  {{- end }}
  selector:
    matchLabels:
      {{- include "librechat.selectorLabels" . | nindent 6 }}
  template:
    metadata:
      {{- with .Values.podAnnotations }}
      annotations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      labels:
        {{- include "librechat.selectorLabels" . | nindent 8 }}
        {{- with .Values.podLabels }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      serviceAccountName: {{ include "librechat.serviceAccountName" . }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        - name: {{ .Chart.Name }}
          securityContext:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          envFrom:
            {{ if .Values.config.envSecrets.secretRef }}
            - secretRef:
                name: {{ .Values.config.envSecrets.secretRef }}
            {{- end }}
            - configMapRef:
                name: {{ include "librechat.fullname" . }}-env
          env:
            {{- range $secretKeyRef := .Values.config.envSecrets.secretKeyRef }}
            - name: {{ $secretKeyRef.name }}
              valueFrom:
                secretKeyRef:
                  name: {{ $secretKeyRef.secretName }}
                  key: {{ $secretKeyRef.secretKey }}
            {{- end }}
          ports:
            - name: http
              containerPort: 3080
              protocol: TCP
          livenessProbe:
            initialDelaySeconds: 5
            httpGet:
              path: /
              port: http
          readinessProbe:
            initialDelaySeconds: 5
            httpGet:
              path: /
              port: http
          resources:
            {{- toYaml .Values.resources | nindent 12 }}
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
@@ -1,12 +0,0 @@
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
  name: {{ include "librechat.serviceAccountName" . }}
  labels:
    {{- include "librechat.labels" . | nindent 4 }}
  {{- with .Values.serviceAccount.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
{{- end }}
@@ -1,112 +0,0 @@
# Default values for librechat.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.

replicaCount: 1

image:
  repository: ghcr.io/danny-avila/librechat
  pullPolicy: IfNotPresent
  # Overrides the image tag whose default is the chart appVersion.
  tag: "latest"

imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""

serviceAccount:
  # Specifies whether a service account should be created
  create: true
  # Annotations to add to the service account
  annotations: {}
  # The name of the service account to use.
  # If not set and create is true, a name is generated using the fullname template
  name: ""

podAnnotations: {}

podLabels: {}

podSecurityContext: {}
  # fsGroup: 2000

securityContext: {}
  # capabilities:
  #   drop:
  #   - ALL
  # readOnlyRootFilesystem: true
  # runAsNonRoot: true
  # runAsUser: 1000

networkPolicies:
  enabled: true

service:
  type: LoadBalancer
  port: 80

ingress:
  enabled: true
  className: "nginx"
  annotations: {}
    # kubernetes.io/ingress.class: nginx
    # kubernetes.io/tls-acme: "true"
  hosts:
    - host: chat.example.com
      paths:
        - path: /
          pathType: ImplementationSpecific
  tls: []
  #  - secretName: chart-example-tls
  #    hosts:
  #      - chart-example.local

resources: {}
  # limits:
  #   cpu: 100m
  #   memory: 128Mi
  # requests:
  #   cpu: 100m
  #   memory: 128Mi

autoscaling:
  enabled: false
  minReplicas: 1
  maxReplicas: 100
  targetCPUUtilizationPercentage: 80
  # targetMemoryUtilizationPercentage: 80

nodeSelector: {}

tolerations: []

affinity: {}

config:
  envSecrets:
    # Use this when using one k8s secret for multiple env secrets
    # secretRef: librechat

    # Use this when using one k8s secret for each env secret
    secretKeyRef: []
    # - name: CREDS_IV
    #   secretName: librechat
    #   secretKey: CREDS_IV

  env:
    # Full list of possible values
    # https://github.com/danny-avila/LibreChat/blob/main/.env.example
    ALLOW_EMAIL_LOGIN: "true"
    ALLOW_REGISTRATION: "true"
    ALLOW_SOCIAL_LOGIN: "false"
    ALLOW_SOCIAL_REGISTRATION: "false"
    APP_TITLE: "Librechat"
    CUSTOM_FOOTER: "Provided with ❤️"
    DEBUG_CONSOLE: "true"
    DEBUG_LOGGING: "true"
    DEBUG_OPENAI: "true"
    DEBUG_PLUGINS: "true"
    DOMAIN_CLIENT: ""
    DOMAIN_SERVER: ""
    ENDPOINTS: "openAI,azureOpenAI,chatGPTBrowser,google,gptPlugins,anthropic"
    SEARCH: false
@@ -6,7 +6,7 @@
  "scripts": {
    "data-provider": "cd .. && npm run build:data-provider",
    "build:file": "cross-env NODE_ENV=production vite build --debug > vite-output.log 2>&1",
    "build": "cross-env NODE_ENV=production vite build && node ./scripts/post-build.cjs",
    "build:ci": "cross-env NODE_ENV=development vite build --mode ci",
    "dev": "cross-env NODE_ENV=development vite",
    "preview-prod": "cross-env NODE_ENV=development vite preview",
@@ -87,7 +87,7 @@
    "react-i18next": "^15.4.0",
    "react-lazy-load-image-component": "^1.6.0",
    "react-markdown": "^9.0.1",
    "react-resizable-panels": "^3.0.2",
    "react-router-dom": "^6.11.2",
    "react-speech-recognition": "^3.10.0",
    "react-textarea-autosize": "^8.4.0",
@@ -139,6 +139,7 @@
    "postcss": "^8.4.31",
    "postcss-loader": "^7.1.0",
    "postcss-preset-env": "^8.2.0",
    "rollup-plugin-visualizer": "^6.0.0",
    "tailwindcss": "^3.4.1",
    "ts-jest": "^29.2.5",
    "typescript": "^5.3.3",
client/public/assets/google.svg (new file, 1 line)
@@ -0,0 +1 @@
<svg height="56" style="flex: 0 0 auto; line-height: 1;" viewBox="0 0 24 24" width="56" xmlns="http://www.w3.org/2000/svg"><title>Gemini</title><defs><linearGradient id="lobe-icons-gemini-fill" x1="0%" x2="68.73%" y1="100%" y2="30.395%"><stop offset="0%" stop-color="#1C7DFF"></stop><stop offset="52.021%" stop-color="#1C69FF"></stop><stop offset="100%" stop-color="#F0DCD6"></stop></linearGradient></defs><path d="M12 24A14.304 14.304 0 000 12 14.304 14.304 0 0012 0a14.305 14.305 0 0012 12 14.305 14.305 0 00-12 12" fill="url(#lobe-icons-gemini-fill)" fill-rule="nonzero"></path></svg>
client/public/assets/openai.svg (new file, 1 line)
@@ -0,0 +1 @@
<svg fill="currentColor" fill-rule="evenodd" height="56" style="flex: 0 0 auto; line-height: 1;" viewBox="0 0 24 24" width="56" xmlns="http://www.w3.org/2000/svg"><title>OpenAI</title><path d="M21.55 10.004a5.416 5.416 0 00-.478-4.501c-1.217-2.09-3.662-3.166-6.05-2.66A5.59 5.59 0 0010.831 1C8.39.995 6.224 2.546 5.473 4.838A5.553 5.553 0 001.76 7.496a5.487 5.487 0 00.691 6.5 5.416 5.416 0 00.477 4.502c1.217 2.09 3.662 3.165 6.05 2.66A5.586 5.586 0 0013.168 23c2.443.006 4.61-1.546 5.361-3.84a5.553 5.553 0 003.715-2.66 5.488 5.488 0 00-.693-6.497v.001zm-8.381 11.558a4.199 4.199 0 01-2.675-.954c.034-.018.093-.05.132-.074l4.44-2.53a.71.71 0 00.364-.623v-6.176l1.877 1.069c.02.01.033.029.036.05v5.115c-.003 2.274-1.87 4.118-4.174 4.123zM4.192 17.78a4.059 4.059 0 01-.498-2.763c.032.02.09.055.131.078l4.44 2.53c.225.13.504.13.73 0l5.42-3.088v2.138a.068.068 0 01-.027.057L9.9 19.288c-1.999 1.136-4.552.46-5.707-1.51h-.001zM3.023 8.216A4.15 4.15 0 015.198 6.41l-.002.151v5.06a.711.711 0 00.364.624l5.42 3.087-1.876 1.07a.067.067 0 01-.063.005l-4.489-2.559c-1.995-1.14-2.679-3.658-1.53-5.63h.001zm15.417 3.54l-5.42-3.088L14.896 7.6a.067.067 0 01.063-.006l4.489 2.557c1.998 1.14 2.683 3.662 1.529 5.633a4.163 4.163 0 01-2.174 1.807V12.38a.71.71 0 00-.363-.623zm1.867-2.773a6.04 6.04 0 00-.132-.078l-4.44-2.53a.731.731 0 00-.729 0l-5.42 3.088V7.325a.068.068 0 01.027-.057L14.1 4.713c2-1.137 4.555-.46 5.707 1.513.487.833.664 1.809.499 2.757h.001zm-11.741 3.81l-1.877-1.068a.065.065 0 01-.036-.051V6.559c.001-2.277 1.873-4.122 4.181-4.12.976 0 1.92.338 2.671.954-.034.018-.092.05-.131.073l-4.44 2.53a.71.71 0 00-.365.623l-.003 6.173v.002zm1.02-2.168L12 9.25l2.414 1.375v2.75L12 14.75l-2.415-1.375v-2.75z"></path></svg>
client/public/assets/qwen.svg (new file, 1 line)
@@ -0,0 +1 @@
<svg height="56" style="flex: 0 0 auto; line-height: 1;" viewBox="0 0 24 24" width="56" xmlns="http://www.w3.org/2000/svg"><title>Qwen</title><defs><linearGradient id="lobe-icons-qwen-fill" x1="0%" x2="100%" y1="0%" y2="0%"><stop offset="0%" stop-color="#00055F" stop-opacity=".84"></stop><stop offset="100%" stop-color="#6F69F7" stop-opacity=".84"></stop></linearGradient></defs><path d="M12.604 1.34c.393.69.784 1.382 1.174 2.075a.18.18 0 00.157.091h5.552c.174 0 .322.11.446.327l1.454 2.57c.19.337.24.478.024.837-.26.43-.513.864-.76 1.3l-.367.658c-.106.196-.223.28-.04.512l2.652 4.637c.172.301.111.494-.043.77-.437.785-.882 1.564-1.335 2.34-.159.272-.352.375-.68.37-.777-.016-1.552-.01-2.327.016a.099.099 0 00-.081.05 575.097 575.097 0 01-2.705 4.74c-.169.293-.38.363-.725.364-.997.003-2.002.004-3.017.002a.537.537 0 01-.465-.271l-1.335-2.323a.09.09 0 00-.083-.049H4.982c-.285.03-.553-.001-.805-.092l-1.603-2.77a.543.543 0 01-.002-.54l1.207-2.12a.198.198 0 000-.197 550.951 550.951 0 01-1.875-3.272l-.79-1.395c-.16-.31-.173-.496.095-.965.465-.813.927-1.625 1.387-2.436.132-.234.304-.334.584-.335a338.3 338.3 0 012.589-.001.124.124 0 00.107-.063l2.806-4.895a.488.488 0 01.422-.246c.524-.001 1.053 0 1.583-.006L11.704 1c.341-.003.724.032.9.34zm-3.432.403a.06.06 0 00-.052.03L6.254 6.788a.157.157 0 01-.135.078H3.253c-.056 0-.07.025-.041.074l5.81 10.156c.025.042.013.062-.034.063l-2.795.015a.218.218 0 00-.2.116l-1.32 2.31c-.044.078-.021.118.068.118l5.716.008c.046 0 .08.02.104.061l1.403 2.454c.046.081.092.082.139 0l5.006-8.76.783-1.382a.055.055 0 01.096 0l1.424 2.53a.122.122 0 00.107.062l2.763-.02a.04.04 0 00.035-.02.041.041 0 000-.04l-2.9-5.086a.108.108 0 010-.113l.293-.507 1.12-1.977c.024-.041.012-.062-.035-.062H9.2c-.059 0-.073-.026-.043-.077l1.434-2.505a.107.107 0 000-.114L9.225 1.774a.06.06 0 00-.053-.031zm6.29 8.02c.046 0 .058.02.034.06l-.832 1.465-2.613 4.585a.056.056 0 01-.05.029.058.058 0 01-.05-.029L8.498 9.841c-.02-.034-.01-.052.028-.054l.216-.012 6.722-.012z" fill="url(#lobe-icons-qwen-fill)" fill-rule="nonzero"></path></svg>
client/scripts/post-build.cjs (new file, 14 lines)
@@ -0,0 +1,14 @@
const fs = require('fs-extra');

async function postBuild() {
  try {
    await fs.copy('public/assets', 'dist/assets');
    await fs.copy('public/robots.txt', 'dist/robots.txt');
    console.log('✅ PWA icons and robots.txt copied successfully. Glob pattern warnings resolved.');
  } catch (err) {
    console.error('❌ Error copying files:', err);
    process.exit(1);
  }
}

postBuild();
client/src/Providers/SearchContext.tsx (new file, 9 lines)
@@ -0,0 +1,9 @@
import { createContext, useContext } from 'react';
import type { SearchResultData } from 'librechat-data-provider';

type SearchContext = {
  searchResults?: { [key: string]: SearchResultData };
};

export const SearchContext = createContext<SearchContext>({} as SearchContext);
export const useSearchContext = () => useContext(SearchContext);
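A short usage sketch of the new context; the component names and the shape of the provider value are illustrative assumptions, not taken from this diff:

// Hypothetical consumer/provider pair; real values come from search state.
import React from 'react';
import { SearchContext, useSearchContext } from '~/Providers';

function SourceBadge({ turnKey }) {
  const { searchResults } = useSearchContext();
  return <span>{searchResults?.[turnKey] ? 'sources available' : 'no sources'}</span>;
}

function MessageWithSources({ results }) {
  return (
    <SearchContext.Provider value={{ searchResults: results }}>
      <SourceBadge turnKey="turn-0" />
    </SearchContext.Provider>
  );
}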
@@ -20,3 +20,4 @@ export * from './ArtifactContext';
export * from './CodeBlockContext';
export * from './ToolCallsMapContext';
export * from './SetConvoContext';
export * from './SearchContext';
@@ -10,6 +10,7 @@ export type TAgentOption = OptionWithIcon &
  };

export type TAgentCapabilities = {
  [AgentCapabilities.web_search]: boolean;
  [AgentCapabilities.file_search]: boolean;
  [AgentCapabilities.execute_code]: boolean;
  [AgentCapabilities.end_after_tools]?: boolean;
Some files were not shown because too many files have changed in this diff.