diff --git a/.env.example b/.env.example
index db09bb471f..a6ff6157ce 100644
--- a/.env.example
+++ b/.env.example
@@ -64,11 +64,6 @@ CONSOLE_JSON=false
DEBUG_LOGGING=true
DEBUG_CONSOLE=false
-# Set to true to enable agent debug logging
-AGENT_DEBUG_LOGGING=false
-
-# Enable memory diagnostics (logs heap/RSS snapshots every 60s, auto-enabled with --inspect)
-# MEM_DIAG=true
#=============#
# Permissions #
@@ -198,10 +193,10 @@ GOOGLE_KEY=user_provided
# GOOGLE_AUTH_HEADER=true
# Gemini API (AI Studio)
-# GOOGLE_MODELS=gemini-3.1-pro-preview,gemini-3.1-pro-preview-customtools,gemini-3.1-flash-lite-preview,gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite,gemini-2.0-flash,gemini-2.0-flash-lite
+# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite,gemini-2.0-flash,gemini-2.0-flash-lite
# Vertex AI
-# GOOGLE_MODELS=gemini-3.1-pro-preview,gemini-3.1-pro-preview-customtools,gemini-3.1-flash-lite-preview,gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite,gemini-2.0-flash-001,gemini-2.0-flash-lite-001
+# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite,gemini-2.0-flash-001,gemini-2.0-flash-lite-001
# GOOGLE_TITLE_MODEL=gemini-2.0-flash-lite-001
@@ -248,6 +243,10 @@ GOOGLE_KEY=user_provided
# Option A: Use dedicated Gemini API key for image generation
# GEMINI_API_KEY=your-gemini-api-key
+# Option B: Use Vertex AI (no API key needed, uses service account)
+# Set this to enable Vertex AI and allow tool without requiring API keys
+# GEMINI_VERTEX_ENABLED=true
+
# Vertex AI model for image generation (defaults to gemini-2.5-flash-image)
# GEMINI_IMAGE_MODEL=gemini-2.5-flash-image
@@ -515,9 +514,6 @@ OPENID_ADMIN_ROLE_TOKEN_KIND=
OPENID_USERNAME_CLAIM=
# Set to determine which user info property returned from OpenID Provider to store as the User's name
OPENID_NAME_CLAIM=
-# Set to determine which user info claim to use as the email/identifier for user matching (e.g., "upn" for Entra ID)
-# When not set, defaults to: email -> preferred_username -> upn
-OPENID_EMAIL_CLAIM=
# Optional audience parameter for OpenID authorization requests
OPENID_AUDIENCE=
@@ -542,8 +538,6 @@ OPENID_ON_BEHALF_FLOW_USERINFO_SCOPE="user.read" # example for Scope Needed for
OPENID_USE_END_SESSION_ENDPOINT=
# URL to redirect to after OpenID logout (defaults to ${DOMAIN_CLIENT}/login)
OPENID_POST_LOGOUT_REDIRECT_URI=
-# Maximum logout URL length before using logout_hint instead of id_token_hint (default: 2000)
-OPENID_MAX_LOGOUT_URL_LENGTH=
#========================#
# SharePoint Integration #
@@ -627,7 +621,6 @@ EMAIL_PORT=25
EMAIL_ENCRYPTION=
EMAIL_ENCRYPTION_HOSTNAME=
EMAIL_ALLOW_SELFSIGNED=
-# Leave both empty for SMTP servers that do not require authentication
EMAIL_USERNAME=
EMAIL_PASSWORD=
EMAIL_FROM_NAME=
@@ -665,9 +658,6 @@ AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
AWS_REGION=
AWS_BUCKET_NAME=
-# Required for path-style S3-compatible providers (MinIO, Hetzner, Backblaze B2, etc.)
-# that don't support virtual-hosted-style URLs (bucket.endpoint). Not needed for AWS S3.
-# AWS_FORCE_PATH_STYLE=false
#========================#
# Azure Blob Storage #
@@ -682,8 +672,7 @@ AZURE_CONTAINER_NAME=files
#========================#
ALLOW_SHARED_LINKS=true
-# Allows unauthenticated access to shared links. Defaults to false (auth required) if not set.
-ALLOW_SHARED_LINKS_PUBLIC=false
+ALLOW_SHARED_LINKS_PUBLIC=true
#==============================#
# Static File Cache Control #
@@ -855,24 +844,3 @@ OPENWEATHER_API_KEY=
# Skip code challenge method validation (e.g., for AWS Cognito that supports S256 but doesn't advertise it)
# When set to true, forces S256 code challenge even if not advertised in .well-known/openid-configuration
# MCP_SKIP_CODE_CHALLENGE_CHECK=false
-
-# Circuit breaker: max connect/disconnect cycles before tripping (per server)
-# MCP_CB_MAX_CYCLES=7
-
-# Circuit breaker: sliding window (ms) for counting cycles
-# MCP_CB_CYCLE_WINDOW_MS=45000
-
-# Circuit breaker: cooldown (ms) after the cycle breaker trips
-# MCP_CB_CYCLE_COOLDOWN_MS=15000
-
-# Circuit breaker: max consecutive failed connection rounds before backoff
-# MCP_CB_MAX_FAILED_ROUNDS=3
-
-# Circuit breaker: sliding window (ms) for counting failed rounds
-# MCP_CB_FAILED_WINDOW_MS=120000
-
-# Circuit breaker: base backoff (ms) after failed round threshold is reached
-# MCP_CB_BASE_BACKOFF_MS=30000
-
-# Circuit breaker: max backoff cap (ms) for exponential backoff
-# MCP_CB_MAX_BACKOFF_MS=300000
diff --git a/.gitattributes b/.gitattributes
deleted file mode 100644
index 725ac8b6bd..0000000000
--- a/.gitattributes
+++ /dev/null
@@ -1,3 +0,0 @@
-# Force LF line endings for shell scripts and git hooks (required for cross-platform compatibility)
-.husky/* text eol=lf
-*.sh text eol=lf
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index ae9e6d8e4b..ad0a75ab9b 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -26,14 +26,18 @@ Project maintainers have the right and responsibility to remove, edit, or reject
## 1. Development Setup
-1. Use Node.js v20.19.0+ or ^22.12.0 or >= 23.0.0.
-2. Run `npm run smart-reinstall` to install dependencies (uses Turborepo). Use `npm run reinstall` for a clean install, or `npm ci` for a fresh lockfile-based install.
-3. Build all compiled code: `npm run build`.
-4. Setup and run unit tests:
+1. Use Node.JS 20.x.
+2. Install typescript globally: `npm i -g typescript`.
+3. Run `npm ci` to install dependencies.
+4. Build the data provider: `npm run build:data-provider`.
+5. Build data schemas: `npm run build:data-schemas`.
+6. Build API methods: `npm run build:api`.
+7. Setup and run unit tests:
- Copy `.env.test`: `cp api/test/.env.test.example api/test/.env.test`.
- Run backend unit tests: `npm run test:api`.
- Run frontend unit tests: `npm run test:client`.
-5. Setup and run integration tests:
+8. Setup and run integration tests:
+ - Build client: `cd client && npm run build`.
- Create `.env`: `cp .env.example .env`.
- Install [MongoDB Community Edition](https://www.mongodb.com/docs/manual/administration/install-community/), ensure that `mongosh` connects to your local instance.
- Run: `npx install playwright`, then `npx playwright install`.
@@ -44,11 +48,11 @@ Project maintainers have the right and responsibility to remove, edit, or reject
## 2. Development Notes
1. Before starting work, make sure your main branch has the latest commits with `npm run update`.
-2. Run linting command to find errors: `npm run lint`. Alternatively, ensure husky pre-commit checks are functioning.
+3. Run linting command to find errors: `npm run lint`. Alternatively, ensure husky pre-commit checks are functioning.
3. After your changes, reinstall packages in your current branch using `npm run reinstall` and ensure everything still works.
- Restart the ESLint server ("ESLint: Restart ESLint Server" in VS Code command bar) and your IDE after reinstalling or updating.
4. Clear web app localStorage and cookies before and after changes.
-5. To check for introduced errors, build all compiled code: `npm run build`.
+5. For frontend changes, compile typescript before and after changes to check for introduced errors: `cd client && npm run build`.
6. Run backend unit tests: `npm run test:api`.
7. Run frontend unit tests: `npm run test:client`.
8. Run integration tests: `npm run e2e`.
@@ -114,45 +118,50 @@ Apply the following naming conventions to branches, labels, and other Git-relate
- **JS/TS:** Directories and file names: Descriptive and camelCase. First letter uppercased for React files (e.g., `helperFunction.ts, ReactComponent.tsx`).
- **Docs:** Directories and file names: Descriptive and snake_case (e.g., `config_files.md`).
-## 7. Coding Standards
-
-For detailed coding conventions, workspace boundaries, and architecture guidance, refer to the [`AGENTS.md`](../AGENTS.md) file at the project root. It covers code style, type safety, import ordering, iteration/performance expectations, frontend rules, testing, and development commands.
-
-## 8. TypeScript Conversion
+## 7. TypeScript Conversion
1. **Original State**: The project was initially developed entirely in JavaScript (JS).
-2. **Frontend**: Fully transitioned to TypeScript.
+2. **Frontend Transition**:
+ - We are in the process of transitioning the frontend from JS to TypeScript (TS).
+ - The transition is nearing completion.
+ - This conversion is feasible due to React's capability to intermix JS and TS prior to code compilation. It's standard practice to compile/bundle the code in such scenarios.
-3. **Backend**:
- - The legacy Express.js server remains in `/api` as JavaScript.
- - All new backend code is written in TypeScript under `/packages/api`, which is compiled and consumed by `/api`.
- - Shared database logic lives in `/packages/data-schemas` (TypeScript).
- - Shared frontend/backend API types and services live in `/packages/data-provider` (TypeScript).
- - Minimize direct changes to `/api`; prefer adding TypeScript code to `/packages/api` and importing it.
+3. **Backend Considerations**:
+ - Transitioning the backend to TypeScript would be a more intricate process, especially for an established Express.js server.
+
+ - **Options for Transition**:
+ - **Single Phase Overhaul**: This involves converting the entire backend to TypeScript in one go. It's the most straightforward approach but can be disruptive, especially for larger codebases.
+
+ - **Incremental Transition**: Convert parts of the backend progressively. This can be done by:
+ - Maintaining a separate directory for TypeScript files.
+ - Gradually migrating and testing individual modules or routes.
+ - Using a build tool like `tsc` to compile TypeScript files independently until the entire transition is complete.
+
+ - **Compilation Considerations**:
+ - Introducing a compilation step for the server is an option. This would involve using tools like `ts-node` for development and `tsc` for production builds.
+ - However, this is not a conventional approach for Express.js servers and could introduce added complexity, especially in terms of build and deployment processes.
+
+ - **Current Stance**: At present, this backend transition is of lower priority and might not be pursued.
-## 9. Module Import Conventions
+## 8. Module Import Conventions
-Imports are organized into three sections (in order):
+- `npm` packages first,
+ - from longest line (top) to shortest (bottom)
-1. **Package imports** — sorted from shortest to longest line length.
- - `react` is always the first import.
- - Multi-line (stacked) imports count their total character length across all lines for sorting.
+- Followed by typescript types (pertains to data-provider and client workspaces)
+ - longest line (top) to shortest (bottom)
+ - types from package come first
-2. **`import type` imports** — sorted from longest to shortest line length.
- - Package type imports come first, then local type imports.
- - Line length sorting resets between the package and local sub-groups.
-
-3. **Local/project imports** — sorted from longest to shortest line length.
- - Multi-line (stacked) imports count their total character length across all lines for sorting.
- - Imports with alias `~` are treated the same as relative imports with respect to line length.
-
-- Consolidate value imports from the same module as much as possible.
-- Always use standalone `import type { ... }` for type imports; never use inline `type` keyword inside value imports (e.g., `import { Foo, type Bar }` is wrong).
+- Lastly, local imports
+ - longest line (top) to shortest (bottom)
+ - imports with alias `~` treated the same as relative import with respect to line length
**Note:** ESLint will automatically enforce these import conventions when you run `npm run lint --fix` or through pre-commit hooks.
-For the full set of coding standards, see [`AGENTS.md`](../AGENTS.md).
+---
+
+Please ensure that you adapt this summary to fit the specific context and nuances of your project.
---
diff --git a/.github/workflows/backend-review.yml b/.github/workflows/backend-review.yml
index 9dd3905c0e..2379b8fee7 100644
--- a/.github/workflows/backend-review.yml
+++ b/.github/workflows/backend-review.yml
@@ -9,218 +9,11 @@ on:
paths:
- 'api/**'
- 'packages/**'
-
-env:
- NODE_ENV: CI
- NODE_OPTIONS: '--max-old-space-size=${{ secrets.NODE_MAX_OLD_SPACE_SIZE || 6144 }}'
-
jobs:
- build:
- name: Build packages
+ tests_Backend:
+ name: Run Backend unit tests
+ timeout-minutes: 60
runs-on: ubuntu-latest
- timeout-minutes: 15
- steps:
- - uses: actions/checkout@v4
-
- - name: Use Node.js 20.19
- uses: actions/setup-node@v4
- with:
- node-version: '20.19'
-
- - name: Restore node_modules cache
- id: cache-node-modules
- uses: actions/cache@v4
- with:
- path: |
- node_modules
- api/node_modules
- packages/api/node_modules
- packages/data-provider/node_modules
- packages/data-schemas/node_modules
- key: node-modules-backend-${{ runner.os }}-20.19-${{ hashFiles('package-lock.json') }}
-
- - name: Install dependencies
- if: steps.cache-node-modules.outputs.cache-hit != 'true'
- run: npm ci
-
- - name: Restore data-provider build cache
- id: cache-data-provider
- uses: actions/cache@v4
- with:
- path: packages/data-provider/dist
- key: build-data-provider-${{ runner.os }}-${{ hashFiles('packages/data-provider/src/**', 'packages/data-provider/tsconfig*.json', 'packages/data-provider/rollup.config.js', 'packages/data-provider/package.json') }}
-
- - name: Build data-provider
- if: steps.cache-data-provider.outputs.cache-hit != 'true'
- run: npm run build:data-provider
-
- - name: Restore data-schemas build cache
- id: cache-data-schemas
- uses: actions/cache@v4
- with:
- path: packages/data-schemas/dist
- key: build-data-schemas-${{ runner.os }}-${{ hashFiles('packages/data-schemas/src/**', 'packages/data-schemas/tsconfig*.json', 'packages/data-schemas/rollup.config.js', 'packages/data-schemas/package.json', 'packages/data-provider/src/**', 'packages/data-provider/tsconfig*.json', 'packages/data-provider/rollup.config.js', 'packages/data-provider/package.json') }}
-
- - name: Build data-schemas
- if: steps.cache-data-schemas.outputs.cache-hit != 'true'
- run: npm run build:data-schemas
-
- - name: Restore api build cache
- id: cache-api
- uses: actions/cache@v4
- with:
- path: packages/api/dist
- key: build-api-${{ runner.os }}-${{ hashFiles('packages/api/src/**', 'packages/api/tsconfig*.json', 'packages/api/server-rollup.config.js', 'packages/api/package.json', 'packages/data-provider/src/**', 'packages/data-provider/tsconfig*.json', 'packages/data-provider/rollup.config.js', 'packages/data-provider/package.json', 'packages/data-schemas/src/**', 'packages/data-schemas/tsconfig*.json', 'packages/data-schemas/rollup.config.js', 'packages/data-schemas/package.json') }}
-
- - name: Build api
- if: steps.cache-api.outputs.cache-hit != 'true'
- run: npm run build:api
-
- - name: Upload data-provider build
- uses: actions/upload-artifact@v4
- with:
- name: build-data-provider
- path: packages/data-provider/dist
- retention-days: 2
-
- - name: Upload data-schemas build
- uses: actions/upload-artifact@v4
- with:
- name: build-data-schemas
- path: packages/data-schemas/dist
- retention-days: 2
-
- - name: Upload api build
- uses: actions/upload-artifact@v4
- with:
- name: build-api
- path: packages/api/dist
- retention-days: 2
-
- typecheck:
- name: TypeScript type checks
- needs: build
- runs-on: ubuntu-latest
- timeout-minutes: 10
- steps:
- - uses: actions/checkout@v4
-
- - name: Use Node.js 20.19
- uses: actions/setup-node@v4
- with:
- node-version: '20.19'
-
- - name: Restore node_modules cache
- id: cache-node-modules
- uses: actions/cache@v4
- with:
- path: |
- node_modules
- api/node_modules
- packages/api/node_modules
- packages/data-provider/node_modules
- packages/data-schemas/node_modules
- key: node-modules-backend-${{ runner.os }}-20.19-${{ hashFiles('package-lock.json') }}
-
- - name: Install dependencies
- if: steps.cache-node-modules.outputs.cache-hit != 'true'
- run: npm ci
-
- - name: Download data-provider build
- uses: actions/download-artifact@v4
- with:
- name: build-data-provider
- path: packages/data-provider/dist
-
- - name: Download data-schemas build
- uses: actions/download-artifact@v4
- with:
- name: build-data-schemas
- path: packages/data-schemas/dist
-
- - name: Download api build
- uses: actions/download-artifact@v4
- with:
- name: build-api
- path: packages/api/dist
-
- - name: Type check data-provider
- run: npx tsc --noEmit -p packages/data-provider/tsconfig.json
-
- - name: Type check data-schemas
- run: npx tsc --noEmit -p packages/data-schemas/tsconfig.json
-
- - name: Type check @librechat/api
- run: npx tsc --noEmit -p packages/api/tsconfig.json
-
- - name: Type check @librechat/client
- run: npx tsc --noEmit -p packages/client/tsconfig.json
-
- circular-deps:
- name: Circular dependency checks
- needs: build
- runs-on: ubuntu-latest
- timeout-minutes: 10
- steps:
- - uses: actions/checkout@v4
-
- - name: Use Node.js 20.19
- uses: actions/setup-node@v4
- with:
- node-version: '20.19'
-
- - name: Restore node_modules cache
- id: cache-node-modules
- uses: actions/cache@v4
- with:
- path: |
- node_modules
- api/node_modules
- packages/api/node_modules
- packages/data-provider/node_modules
- packages/data-schemas/node_modules
- key: node-modules-backend-${{ runner.os }}-20.19-${{ hashFiles('package-lock.json') }}
-
- - name: Install dependencies
- if: steps.cache-node-modules.outputs.cache-hit != 'true'
- run: npm ci
-
- - name: Download data-provider build
- uses: actions/download-artifact@v4
- with:
- name: build-data-provider
- path: packages/data-provider/dist
-
- - name: Download data-schemas build
- uses: actions/download-artifact@v4
- with:
- name: build-data-schemas
- path: packages/data-schemas/dist
-
- - name: Rebuild @librechat/api and check for circular dependencies
- run: |
- output=$(npm run build:api 2>&1)
- echo "$output"
- if echo "$output" | grep -q "Circular depend"; then
- echo "Error: Circular dependency detected in @librechat/api!"
- exit 1
- fi
-
- - name: Detect circular dependencies in rollup
- working-directory: ./packages/data-provider
- run: |
- output=$(npm run rollup:api)
- echo "$output"
- if echo "$output" | grep -q "Circular dependency"; then
- echo "Error: Circular dependency detected!"
- exit 1
- fi
-
- test-api:
- name: 'Tests: api'
- needs: build
- runs-on: ubuntu-latest
- timeout-minutes: 15
env:
MONGO_URI: ${{ secrets.MONGO_URI }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
@@ -230,187 +23,54 @@ jobs:
BAN_VIOLATIONS: ${{ secrets.BAN_VIOLATIONS }}
BAN_DURATION: ${{ secrets.BAN_DURATION }}
BAN_INTERVAL: ${{ secrets.BAN_INTERVAL }}
+ NODE_ENV: CI
+ NODE_OPTIONS: '--max-old-space-size=${{ secrets.NODE_MAX_OLD_SPACE_SIZE || 6144 }}'
steps:
- uses: actions/checkout@v4
-
- - name: Use Node.js 20.19
+ - name: Use Node.js 20.x
uses: actions/setup-node@v4
with:
- node-version: '20.19'
-
- - name: Restore node_modules cache
- id: cache-node-modules
- uses: actions/cache@v4
- with:
- path: |
- node_modules
- api/node_modules
- packages/api/node_modules
- packages/data-provider/node_modules
- packages/data-schemas/node_modules
- key: node-modules-backend-${{ runner.os }}-20.19-${{ hashFiles('package-lock.json') }}
+ node-version: 20
+ cache: 'npm'
- name: Install dependencies
- if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: npm ci
- - name: Download data-provider build
- uses: actions/download-artifact@v4
- with:
- name: build-data-provider
- path: packages/data-provider/dist
+ - name: Install Data Provider Package
+ run: npm run build:data-provider
- - name: Download data-schemas build
- uses: actions/download-artifact@v4
- with:
- name: build-data-schemas
- path: packages/data-schemas/dist
+ - name: Install Data Schemas Package
+ run: npm run build:data-schemas
- - name: Download api build
- uses: actions/download-artifact@v4
- with:
- name: build-api
- path: packages/api/dist
+ - name: Install API Package
+ run: npm run build:api
- name: Create empty auth.json file
run: |
mkdir -p api/data
echo '{}' > api/data/auth.json
+ - name: Check for Circular dependency in rollup
+ working-directory: ./packages/data-provider
+ run: |
+ output=$(npm run rollup:api)
+ echo "$output"
+ if echo "$output" | grep -q "Circular dependency"; then
+ echo "Error: Circular dependency detected!"
+ exit 1
+ fi
+
- name: Prepare .env.test file
run: cp api/test/.env.test.example api/test/.env.test
- name: Run unit tests
run: cd api && npm run test:ci
- test-data-provider:
- name: 'Tests: data-provider'
- needs: build
- runs-on: ubuntu-latest
- timeout-minutes: 10
- steps:
- - uses: actions/checkout@v4
-
- - name: Use Node.js 20.19
- uses: actions/setup-node@v4
- with:
- node-version: '20.19'
-
- - name: Restore node_modules cache
- id: cache-node-modules
- uses: actions/cache@v4
- with:
- path: |
- node_modules
- api/node_modules
- packages/api/node_modules
- packages/data-provider/node_modules
- packages/data-schemas/node_modules
- key: node-modules-backend-${{ runner.os }}-20.19-${{ hashFiles('package-lock.json') }}
-
- - name: Install dependencies
- if: steps.cache-node-modules.outputs.cache-hit != 'true'
- run: npm ci
-
- - name: Download data-provider build
- uses: actions/download-artifact@v4
- with:
- name: build-data-provider
- path: packages/data-provider/dist
-
- - name: Run unit tests
+ - name: Run librechat-data-provider unit tests
run: cd packages/data-provider && npm run test:ci
- test-data-schemas:
- name: 'Tests: data-schemas'
- needs: build
- runs-on: ubuntu-latest
- timeout-minutes: 10
- steps:
- - uses: actions/checkout@v4
-
- - name: Use Node.js 20.19
- uses: actions/setup-node@v4
- with:
- node-version: '20.19'
-
- - name: Restore node_modules cache
- id: cache-node-modules
- uses: actions/cache@v4
- with:
- path: |
- node_modules
- api/node_modules
- packages/api/node_modules
- packages/data-provider/node_modules
- packages/data-schemas/node_modules
- key: node-modules-backend-${{ runner.os }}-20.19-${{ hashFiles('package-lock.json') }}
-
- - name: Install dependencies
- if: steps.cache-node-modules.outputs.cache-hit != 'true'
- run: npm ci
-
- - name: Download data-provider build
- uses: actions/download-artifact@v4
- with:
- name: build-data-provider
- path: packages/data-provider/dist
-
- - name: Download data-schemas build
- uses: actions/download-artifact@v4
- with:
- name: build-data-schemas
- path: packages/data-schemas/dist
-
- - name: Run unit tests
+ - name: Run @librechat/data-schemas unit tests
run: cd packages/data-schemas && npm run test:ci
- test-packages-api:
- name: 'Tests: @librechat/api'
- needs: build
- runs-on: ubuntu-latest
- timeout-minutes: 10
- steps:
- - uses: actions/checkout@v4
-
- - name: Use Node.js 20.19
- uses: actions/setup-node@v4
- with:
- node-version: '20.19'
-
- - name: Restore node_modules cache
- id: cache-node-modules
- uses: actions/cache@v4
- with:
- path: |
- node_modules
- api/node_modules
- packages/api/node_modules
- packages/data-provider/node_modules
- packages/data-schemas/node_modules
- key: node-modules-backend-${{ runner.os }}-20.19-${{ hashFiles('package-lock.json') }}
-
- - name: Install dependencies
- if: steps.cache-node-modules.outputs.cache-hit != 'true'
- run: npm ci
-
- - name: Download data-provider build
- uses: actions/download-artifact@v4
- with:
- name: build-data-provider
- path: packages/data-provider/dist
-
- - name: Download data-schemas build
- uses: actions/download-artifact@v4
- with:
- name: build-data-schemas
- path: packages/data-schemas/dist
-
- - name: Download api build
- uses: actions/download-artifact@v4
- with:
- name: build-api
- path: packages/api/dist
-
- - name: Run unit tests
+ - name: Run @librechat/api unit tests
run: cd packages/api && npm run test:ci
diff --git a/.github/workflows/frontend-review.yml b/.github/workflows/frontend-review.yml
index 9c2d4a37b1..989e2e4abe 100644
--- a/.github/workflows/frontend-review.yml
+++ b/.github/workflows/frontend-review.yml
@@ -2,7 +2,7 @@ name: Frontend Unit Tests
on:
pull_request:
- branches:
+ branches:
- main
- dev
- dev-staging
@@ -11,200 +11,51 @@ on:
- 'client/**'
- 'packages/data-provider/**'
-env:
- NODE_OPTIONS: '--max-old-space-size=${{ secrets.NODE_MAX_OLD_SPACE_SIZE || 6144 }}'
-
jobs:
- build:
- name: Build packages
+ tests_frontend_ubuntu:
+ name: Run frontend unit tests on Ubuntu
+ timeout-minutes: 60
runs-on: ubuntu-latest
- timeout-minutes: 15
+ env:
+ NODE_OPTIONS: '--max-old-space-size=${{ secrets.NODE_MAX_OLD_SPACE_SIZE || 6144 }}'
steps:
- uses: actions/checkout@v4
-
- - name: Use Node.js 20.19
+ - name: Use Node.js 20.x
uses: actions/setup-node@v4
with:
- node-version: '20.19'
-
- - name: Restore node_modules cache
- id: cache-node-modules
- uses: actions/cache@v4
- with:
- path: |
- node_modules
- client/node_modules
- packages/client/node_modules
- packages/data-provider/node_modules
- key: node-modules-frontend-${{ runner.os }}-20.19-${{ hashFiles('package-lock.json') }}
+ node-version: 20
+ cache: 'npm'
- name: Install dependencies
- if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: npm ci
- - name: Restore data-provider build cache
- id: cache-data-provider
- uses: actions/cache@v4
- with:
- path: packages/data-provider/dist
- key: build-data-provider-${{ runner.os }}-${{ hashFiles('packages/data-provider/src/**', 'packages/data-provider/tsconfig*.json', 'packages/data-provider/rollup.config.js', 'packages/data-provider/package.json') }}
-
- - name: Build data-provider
- if: steps.cache-data-provider.outputs.cache-hit != 'true'
- run: npm run build:data-provider
-
- - name: Restore client-package build cache
- id: cache-client-package
- uses: actions/cache@v4
- with:
- path: packages/client/dist
- key: build-client-package-${{ runner.os }}-${{ hashFiles('packages/client/src/**', 'packages/client/tsconfig*.json', 'packages/client/rollup.config.js', 'packages/client/package.json', 'packages/data-provider/src/**', 'packages/data-provider/tsconfig*.json', 'packages/data-provider/rollup.config.js', 'packages/data-provider/package.json') }}
-
- - name: Build client-package
- if: steps.cache-client-package.outputs.cache-hit != 'true'
- run: npm run build:client-package
-
- - name: Upload data-provider build
- uses: actions/upload-artifact@v4
- with:
- name: build-data-provider
- path: packages/data-provider/dist
- retention-days: 2
-
- - name: Upload client-package build
- uses: actions/upload-artifact@v4
- with:
- name: build-client-package
- path: packages/client/dist
- retention-days: 2
-
- test-ubuntu:
- name: 'Tests: Ubuntu'
- needs: build
- runs-on: ubuntu-latest
- timeout-minutes: 15
- steps:
- - uses: actions/checkout@v4
-
- - name: Use Node.js 20.19
- uses: actions/setup-node@v4
- with:
- node-version: '20.19'
-
- - name: Restore node_modules cache
- id: cache-node-modules
- uses: actions/cache@v4
- with:
- path: |
- node_modules
- client/node_modules
- packages/client/node_modules
- packages/data-provider/node_modules
- key: node-modules-frontend-${{ runner.os }}-20.19-${{ hashFiles('package-lock.json') }}
-
- - name: Install dependencies
- if: steps.cache-node-modules.outputs.cache-hit != 'true'
- run: npm ci
-
- - name: Download data-provider build
- uses: actions/download-artifact@v4
- with:
- name: build-data-provider
- path: packages/data-provider/dist
-
- - name: Download client-package build
- uses: actions/download-artifact@v4
- with:
- name: build-client-package
- path: packages/client/dist
+ - name: Build Client
+ run: npm run frontend:ci
- name: Run unit tests
run: npm run test:ci --verbose
working-directory: client
- test-windows:
- name: 'Tests: Windows'
- needs: build
+ tests_frontend_windows:
+ name: Run frontend unit tests on Windows
+ timeout-minutes: 60
runs-on: windows-latest
- timeout-minutes: 20
+ env:
+ NODE_OPTIONS: '--max-old-space-size=${{ secrets.NODE_MAX_OLD_SPACE_SIZE || 6144 }}'
steps:
- uses: actions/checkout@v4
-
- - name: Use Node.js 20.19
+ - name: Use Node.js 20.x
uses: actions/setup-node@v4
with:
- node-version: '20.19'
-
- - name: Restore node_modules cache
- id: cache-node-modules
- uses: actions/cache@v4
- with:
- path: |
- node_modules
- client/node_modules
- packages/client/node_modules
- packages/data-provider/node_modules
- key: node-modules-frontend-${{ runner.os }}-20.19-${{ hashFiles('package-lock.json') }}
+ node-version: 20
+ cache: 'npm'
- name: Install dependencies
- if: steps.cache-node-modules.outputs.cache-hit != 'true'
run: npm ci
- - name: Download data-provider build
- uses: actions/download-artifact@v4
- with:
- name: build-data-provider
- path: packages/data-provider/dist
-
- - name: Download client-package build
- uses: actions/download-artifact@v4
- with:
- name: build-client-package
- path: packages/client/dist
+ - name: Build Client
+ run: npm run frontend:ci
- name: Run unit tests
run: npm run test:ci --verbose
- working-directory: client
-
- build-verify:
- name: Vite build verification
- needs: build
- runs-on: ubuntu-latest
- timeout-minutes: 15
- steps:
- - uses: actions/checkout@v4
-
- - name: Use Node.js 20.19
- uses: actions/setup-node@v4
- with:
- node-version: '20.19'
-
- - name: Restore node_modules cache
- id: cache-node-modules
- uses: actions/cache@v4
- with:
- path: |
- node_modules
- client/node_modules
- packages/client/node_modules
- packages/data-provider/node_modules
- key: node-modules-frontend-${{ runner.os }}-20.19-${{ hashFiles('package-lock.json') }}
-
- - name: Install dependencies
- if: steps.cache-node-modules.outputs.cache-hit != 'true'
- run: npm ci
-
- - name: Download data-provider build
- uses: actions/download-artifact@v4
- with:
- name: build-data-provider
- path: packages/data-provider/dist
-
- - name: Download client-package build
- uses: actions/download-artifact@v4
- with:
- name: build-client-package
- path: packages/client/dist
-
- - name: Build client
- run: cd client && npm run build:ci
+ working-directory: client
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index e302d15a46..86d4a3ddae 100644
--- a/.gitignore
+++ b/.gitignore
@@ -63,7 +63,6 @@ bower_components/
.clineignore
.cursor
.aider*
-.bg-shell/
# Floobits
.floo
@@ -130,7 +129,6 @@ helm/**/charts/
helm/**/.values.yaml
!/client/src/@types/i18next.d.ts
-!/client/src/@types/react.d.ts
# SAML Idp cert
*.cert
@@ -145,6 +143,7 @@ helm/**/.values.yaml
/.codeium
*.local.md
+
# Removed Windows wrapper files per user request
hive-mind-prompt-*.txt
@@ -155,16 +154,16 @@ claude-flow.config.json
.swarm/
.hive-mind/
.claude-flow/
-/memory/
-/coordination/
-/memory/claude-flow-data.json
-/memory/sessions/*
-!/memory/sessions/README.md
-/memory/agents/*
-!/memory/agents/README.md
-/coordination/memory_bank/*
-/coordination/subtasks/*
-/coordination/orchestration/*
+memory/
+coordination/
+memory/claude-flow-data.json
+memory/sessions/*
+!memory/sessions/README.md
+memory/agents/*
+!memory/agents/README.md
+coordination/memory_bank/*
+coordination/subtasks/*
+coordination/orchestration/*
*.db
*.db-journal
*.db-wal
@@ -172,8 +171,5 @@ claude-flow.config.json
*.sqlite-journal
*.sqlite-wal
claude-flow
-.playwright-mcp/*
# Removed Windows wrapper files per user request
hive-mind-prompt-*.txt
-CLAUDE.md
-.gsd
diff --git a/.husky/pre-commit b/.husky/pre-commit
index 70fef90065..23c736d1de 100755
--- a/.husky/pre-commit
+++ b/.husky/pre-commit
@@ -1,3 +1,2 @@
-#!/bin/sh
[ -n "$CI" ] && exit 0
npx lint-staged --config ./.husky/lint-staged.config.js
diff --git a/AGENTS.md b/AGENTS.md
deleted file mode 100644
index ceb2b988dc..0000000000
--- a/AGENTS.md
+++ /dev/null
@@ -1 +0,0 @@
-CLAUDE.md
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000000..a8cb8282bd
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,236 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+
+
+
+
+
+## [Unreleased]
+
+### ✨ New Features
+
+- ✨ feat: implement search parameter updates by **@mawburn** in [#7151](https://github.com/danny-avila/LibreChat/pull/7151)
+- 🎏 feat: Add MCP support for Streamable HTTP Transport by **@benverhees** in [#7353](https://github.com/danny-avila/LibreChat/pull/7353)
+- 🔒 feat: Add Content Security Policy using Helmet middleware by **@rubentalstra** in [#7377](https://github.com/danny-avila/LibreChat/pull/7377)
+- ✨ feat: Add Normalization for MCP Server Names by **@danny-avila** in [#7421](https://github.com/danny-avila/LibreChat/pull/7421)
+- 📊 feat: Improve Helm Chart by **@hofq** in [#3638](https://github.com/danny-avila/LibreChat/pull/3638)
+- 🦾 feat: Claude-4 Support by **@danny-avila** in [#7509](https://github.com/danny-avila/LibreChat/pull/7509)
+- 🪨 feat: Bedrock Support for Claude-4 Reasoning by **@danny-avila** in [#7517](https://github.com/danny-avila/LibreChat/pull/7517)
+
+### 🌍 Internationalization
+
+- 🌍 i18n: Add `Danish` and `Czech` and `Catalan` localization support by **@rubentalstra** in [#7373](https://github.com/danny-avila/LibreChat/pull/7373)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7375](https://github.com/danny-avila/LibreChat/pull/7375)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7468](https://github.com/danny-avila/LibreChat/pull/7468)
+
+### 🔧 Fixes
+
+- 💬 fix: update aria-label for accessibility in ConvoLink component by **@berry-13** in [#7320](https://github.com/danny-avila/LibreChat/pull/7320)
+- 🔑 fix: use `apiKey` instead of `openAIApiKey` in OpenAI-like Config by **@danny-avila** in [#7337](https://github.com/danny-avila/LibreChat/pull/7337)
+- 🔄 fix: update navigation logic in `useFocusChatEffect` to ensure correct search parameters are used by **@mawburn** in [#7340](https://github.com/danny-avila/LibreChat/pull/7340)
+- 🔄 fix: Improve MCP Connection Cleanup by **@danny-avila** in [#7400](https://github.com/danny-avila/LibreChat/pull/7400)
+- 🛡️ fix: Preset and Validation Logic for URL Query Params by **@danny-avila** in [#7407](https://github.com/danny-avila/LibreChat/pull/7407)
+- 🌘 fix: artifact of preview text is illegible in dark mode by **@nhtruong** in [#7405](https://github.com/danny-avila/LibreChat/pull/7405)
+- 🛡️ fix: Temporarily Remove CSP until Configurable by **@danny-avila** in [#7419](https://github.com/danny-avila/LibreChat/pull/7419)
+- 💽 fix: Exclude index page `/` from static cache settings by **@sbruel** in [#7382](https://github.com/danny-avila/LibreChat/pull/7382)
+
+### ⚙️ Other Changes
+
+- 📜 docs: CHANGELOG for release v0.7.8 by **@github-actions[bot]** in [#7290](https://github.com/danny-avila/LibreChat/pull/7290)
+- 📦 chore: Update API Package Dependencies by **@danny-avila** in [#7359](https://github.com/danny-avila/LibreChat/pull/7359)
+- 📜 docs: Unreleased Changelog by **@github-actions[bot]** in [#7321](https://github.com/danny-avila/LibreChat/pull/7321)
+- 📜 docs: Unreleased Changelog by **@github-actions[bot]** in [#7434](https://github.com/danny-avila/LibreChat/pull/7434)
+- 🛡️ chore: `multer` v2.0.0 for CVE-2025-47935 and CVE-2025-47944 by **@danny-avila** in [#7454](https://github.com/danny-avila/LibreChat/pull/7454)
+- 📂 refactor: Improve `FileAttachment` & File Form Deletion by **@danny-avila** in [#7471](https://github.com/danny-avila/LibreChat/pull/7471)
+- 📊 chore: Remove Old Helm Chart by **@hofq** in [#7512](https://github.com/danny-avila/LibreChat/pull/7512)
+- 🪖 chore: bump helm app version to v0.7.8 by **@austin-barrington** in [#7524](https://github.com/danny-avila/LibreChat/pull/7524)
+
+
+
+---
+## [v0.7.8] -
+
+Changes from v0.7.8-rc1 to v0.7.8.
+
+### ✨ New Features
+
+- ✨ feat: Enhance form submission for touch screens by **@berry-13** in [#7198](https://github.com/danny-avila/LibreChat/pull/7198)
+- 🔍 feat: Additional Tavily API Tool Parameters by **@glowforge-opensource** in [#7232](https://github.com/danny-avila/LibreChat/pull/7232)
+- 🐋 feat: Add python to Dockerfile for increased MCP compatibility by **@technicalpickles** in [#7270](https://github.com/danny-avila/LibreChat/pull/7270)
+
+### 🔧 Fixes
+
+- 🔧 fix: Google Gemma Support & OpenAI Reasoning Instructions by **@danny-avila** in [#7196](https://github.com/danny-avila/LibreChat/pull/7196)
+- 🛠️ fix: Conversation Navigation State by **@danny-avila** in [#7210](https://github.com/danny-avila/LibreChat/pull/7210)
+- 🔄 fix: o-Series Model Regex for System Messages by **@danny-avila** in [#7245](https://github.com/danny-avila/LibreChat/pull/7245)
+- 🔖 fix: Custom Headers for Initial MCP SSE Connection by **@danny-avila** in [#7246](https://github.com/danny-avila/LibreChat/pull/7246)
+- 🛡️ fix: Deep Clone `MCPOptions` for User MCP Connections by **@danny-avila** in [#7247](https://github.com/danny-avila/LibreChat/pull/7247)
+- 🔄 fix: URL Param Race Condition and File Draft Persistence by **@danny-avila** in [#7257](https://github.com/danny-avila/LibreChat/pull/7257)
+- 🔄 fix: Assistants Endpoint & Minor Issues by **@danny-avila** in [#7274](https://github.com/danny-avila/LibreChat/pull/7274)
+- 🔄 fix: Ollama Think Tag Edge Case with Tools by **@danny-avila** in [#7275](https://github.com/danny-avila/LibreChat/pull/7275)
+
+### ⚙️ Other Changes
+
+- 📜 docs: CHANGELOG for release v0.7.8-rc1 by **@github-actions[bot]** in [#7153](https://github.com/danny-avila/LibreChat/pull/7153)
+- 🔄 refactor: Artifact Visibility Management by **@danny-avila** in [#7181](https://github.com/danny-avila/LibreChat/pull/7181)
+- 📦 chore: Bump Package Security by **@danny-avila** in [#7183](https://github.com/danny-avila/LibreChat/pull/7183)
+- 🌿 refactor: Unmount Fork Popover on Hide for Better Performance by **@danny-avila** in [#7189](https://github.com/danny-avila/LibreChat/pull/7189)
+- 🧰 chore: ESLint configuration to enforce Prettier formatting rules by **@mawburn** in [#7186](https://github.com/danny-avila/LibreChat/pull/7186)
+- 🎨 style: Improve KaTeX Rendering for LaTeX Equations by **@andresgit** in [#7223](https://github.com/danny-avila/LibreChat/pull/7223)
+- 📝 docs: Update `.env.example` Google models by **@marlonka** in [#7254](https://github.com/danny-avila/LibreChat/pull/7254)
+- 💬 refactor: MCP Chat Visibility Option, Google Rates, Remove OpenAPI Plugins by **@danny-avila** in [#7286](https://github.com/danny-avila/LibreChat/pull/7286)
+- 📜 docs: Unreleased Changelog by **@github-actions[bot]** in [#7214](https://github.com/danny-avila/LibreChat/pull/7214)
+
+
+
+[See full release details][release-v0.7.8]
+
+[release-v0.7.8]: https://github.com/danny-avila/LibreChat/releases/tag/v0.7.8
+
+---
+## [v0.7.8-rc1] -
+
+Changes from v0.7.7 to v0.7.8-rc1.
+
+### ✨ New Features
+
+- 🔍 feat: Mistral OCR API / Upload Files as Text by **@danny-avila** in [#6274](https://github.com/danny-avila/LibreChat/pull/6274)
+- 🤖 feat: Support OpenAI Web Search models by **@danny-avila** in [#6313](https://github.com/danny-avila/LibreChat/pull/6313)
+- 🔗 feat: Agent Chain (Mixture-of-Agents) by **@danny-avila** in [#6374](https://github.com/danny-avila/LibreChat/pull/6374)
+- ⌛ feat: `initTimeout` for Slow Starting MCP Servers by **@perweij** in [#6383](https://github.com/danny-avila/LibreChat/pull/6383)
+- 🚀 feat: `S3` Integration for File handling and Image uploads by **@rubentalstra** in [#6142](https://github.com/danny-avila/LibreChat/pull/6142)
+- 🔒feat: Enable OpenID Auto-Redirect by **@leondape** in [#6066](https://github.com/danny-avila/LibreChat/pull/6066)
+- 🚀 feat: Integrate `Azure Blob Storage` for file handling and image uploads by **@rubentalstra** in [#6153](https://github.com/danny-avila/LibreChat/pull/6153)
+- 🚀 feat: Add support for custom `AWS` endpoint in `S3` by **@rubentalstra** in [#6431](https://github.com/danny-avila/LibreChat/pull/6431)
+- 🚀 feat: Add support for LDAP STARTTLS in LDAP authentication by **@rubentalstra** in [#6438](https://github.com/danny-avila/LibreChat/pull/6438)
+- 🚀 feat: Refactor schema exports and update package version to 0.0.4 by **@rubentalstra** in [#6455](https://github.com/danny-avila/LibreChat/pull/6455)
+- 🔼 feat: Add Auto Submit For URL Query Params by **@mjaverto** in [#6440](https://github.com/danny-avila/LibreChat/pull/6440)
+- 🛠 feat: Enhance Redis Integration, Rate Limiters & Log Headers by **@danny-avila** in [#6462](https://github.com/danny-avila/LibreChat/pull/6462)
+- 💵 feat: Add Automatic Balance Refill by **@rubentalstra** in [#6452](https://github.com/danny-avila/LibreChat/pull/6452)
+- 🗣️ feat: add support for gpt-4o-transcribe models by **@berry-13** in [#6483](https://github.com/danny-avila/LibreChat/pull/6483)
+- 🎨 feat: UI Refresh for Enhanced UX by **@berry-13** in [#6346](https://github.com/danny-avila/LibreChat/pull/6346)
+- 🌍 feat: Add support for Hungarian language localization by **@rubentalstra** in [#6508](https://github.com/danny-avila/LibreChat/pull/6508)
+- 🚀 feat: Add Gemini 2.5 Token/Context Values, Increase Max Possible Output to 64k by **@danny-avila** in [#6563](https://github.com/danny-avila/LibreChat/pull/6563)
+- 🚀 feat: Enhance MCP Connections For Multi-User Support by **@danny-avila** in [#6610](https://github.com/danny-avila/LibreChat/pull/6610)
+- 🚀 feat: Enhance S3 URL Expiry with Refresh; fix: S3 File Deletion by **@danny-avila** in [#6647](https://github.com/danny-avila/LibreChat/pull/6647)
+- 🚀 feat: enhance UI components and refactor settings by **@berry-13** in [#6625](https://github.com/danny-avila/LibreChat/pull/6625)
+- 💬 feat: move TemporaryChat to the Header by **@berry-13** in [#6646](https://github.com/danny-avila/LibreChat/pull/6646)
+- 🚀 feat: Use Model Specs + Specific Endpoints, Limit Providers for Agents by **@danny-avila** in [#6650](https://github.com/danny-avila/LibreChat/pull/6650)
+- 🪙 feat: Sync Balance Config on Login by **@danny-avila** in [#6671](https://github.com/danny-avila/LibreChat/pull/6671)
+- 🔦 feat: MCP Support for Non-Agent Endpoints by **@danny-avila** in [#6775](https://github.com/danny-avila/LibreChat/pull/6775)
+- 🗃️ feat: Code Interpreter File Persistence between Sessions by **@danny-avila** in [#6790](https://github.com/danny-avila/LibreChat/pull/6790)
+- 🖥️ feat: Code Interpreter API for Non-Agent Endpoints by **@danny-avila** in [#6803](https://github.com/danny-avila/LibreChat/pull/6803)
+- ⚡ feat: Self-hosted Artifacts Static Bundler URL by **@danny-avila** in [#6827](https://github.com/danny-avila/LibreChat/pull/6827)
+- 🐳 feat: Add Jemalloc and UV to Docker Builds by **@danny-avila** in [#6836](https://github.com/danny-avila/LibreChat/pull/6836)
+- 🤖 feat: GPT-4.1 by **@danny-avila** in [#6880](https://github.com/danny-avila/LibreChat/pull/6880)
+- 👋 feat: remove Edge TTS by **@berry-13** in [#6885](https://github.com/danny-avila/LibreChat/pull/6885)
+- feat: nav optimization by **@berry-13** in [#5785](https://github.com/danny-avila/LibreChat/pull/5785)
+- 🗺️ feat: Add Parameter Location Mapping for OpenAPI actions by **@peeeteeer** in [#6858](https://github.com/danny-avila/LibreChat/pull/6858)
+- 🤖 feat: Support `o4-mini` and `o3` Models by **@danny-avila** in [#6928](https://github.com/danny-avila/LibreChat/pull/6928)
+- 🎨 feat: OpenAI Image Tools (GPT-Image-1) by **@danny-avila** in [#7079](https://github.com/danny-avila/LibreChat/pull/7079)
+- 🗓️ feat: Add Special Variables for Prompts & Agents, Prompt UI Improvements by **@danny-avila** in [#7123](https://github.com/danny-avila/LibreChat/pull/7123)
+
+### 🌍 Internationalization
+
+- 🌍 i18n: Add Thai Language Support and Update Translations by **@rubentalstra** in [#6219](https://github.com/danny-avila/LibreChat/pull/6219)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6220](https://github.com/danny-avila/LibreChat/pull/6220)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6240](https://github.com/danny-avila/LibreChat/pull/6240)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6241](https://github.com/danny-avila/LibreChat/pull/6241)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6277](https://github.com/danny-avila/LibreChat/pull/6277)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6414](https://github.com/danny-avila/LibreChat/pull/6414)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6505](https://github.com/danny-avila/LibreChat/pull/6505)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6530](https://github.com/danny-avila/LibreChat/pull/6530)
+- 🌍 i18n: Add Persian Localization Support by **@rubentalstra** in [#6669](https://github.com/danny-avila/LibreChat/pull/6669)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#6667](https://github.com/danny-avila/LibreChat/pull/6667)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7126](https://github.com/danny-avila/LibreChat/pull/7126)
+- 🌍 i18n: Update translation.json with latest translations by **@github-actions[bot]** in [#7148](https://github.com/danny-avila/LibreChat/pull/7148)
+
+### 👐 Accessibility
+
+- 🎨 a11y: Update Model Spec Description Text by **@berry-13** in [#6294](https://github.com/danny-avila/LibreChat/pull/6294)
+- 🗑️ a11y: Add Accessible Name to Button for File Attachment Removal by **@kangabell** in [#6709](https://github.com/danny-avila/LibreChat/pull/6709)
+- ⌨️ a11y: enhance accessibility & visual consistency by **@berry-13** in [#6866](https://github.com/danny-avila/LibreChat/pull/6866)
+- 🙌 a11y: Searchbar/Conversations List Focus by **@danny-avila** in [#7096](https://github.com/danny-avila/LibreChat/pull/7096)
+- 👐 a11y: Improve Fork and SplitText Accessibility by **@danny-avila** in [#7147](https://github.com/danny-avila/LibreChat/pull/7147)
+
+### 🔧 Fixes
+
+- 🐛 fix: Avatar Type Definitions in Agent/Assistant Schemas by **@danny-avila** in [#6235](https://github.com/danny-avila/LibreChat/pull/6235)
+- 🔧 fix: MeiliSearch Field Error and Patch Incorrect Import by #6210 by **@rubentalstra** in [#6245](https://github.com/danny-avila/LibreChat/pull/6245)
+- 🔏 fix: Enhance Two-Factor Authentication by **@rubentalstra** in [#6247](https://github.com/danny-avila/LibreChat/pull/6247)
+- 🐛 fix: Await saveMessage in abortMiddleware to ensure proper execution by **@sh4shii** in [#6248](https://github.com/danny-avila/LibreChat/pull/6248)
+- 🔧 fix: Axios Proxy Usage And Bump `mongoose` by **@danny-avila** in [#6298](https://github.com/danny-avila/LibreChat/pull/6298)
+- 🔧 fix: comment out MCP servers to resolve service run issues by **@KunalScriptz** in [#6316](https://github.com/danny-avila/LibreChat/pull/6316)
+- 🔧 fix: Update Token Calculations and Mapping, MCP `env` Initialization by **@danny-avila** in [#6406](https://github.com/danny-avila/LibreChat/pull/6406)
+- 🐞 fix: Agent "Resend" Message Attachments + Source Icon Styling by **@danny-avila** in [#6408](https://github.com/danny-avila/LibreChat/pull/6408)
+- 🐛 fix: Prevent Crash on Duplicate Message ID by **@Odrec** in [#6392](https://github.com/danny-avila/LibreChat/pull/6392)
+- 🔐 fix: Invalid Key Length in 2FA Encryption by **@rubentalstra** in [#6432](https://github.com/danny-avila/LibreChat/pull/6432)
+- 🏗️ fix: Fix Agents Token Spend Race Conditions, Expand Test Coverage by **@danny-avila** in [#6480](https://github.com/danny-avila/LibreChat/pull/6480)
+- 🔃 fix: Draft Clearing, Claude Titles, Remove Default Vision Max Tokens by **@danny-avila** in [#6501](https://github.com/danny-avila/LibreChat/pull/6501)
+- 🔧 fix: Update username reference to use user.name in greeting display by **@rubentalstra** in [#6534](https://github.com/danny-avila/LibreChat/pull/6534)
+- 🔧 fix: S3 Download Stream with Key Extraction and Blob Storage Encoding for Vision by **@danny-avila** in [#6557](https://github.com/danny-avila/LibreChat/pull/6557)
+- 🔧 fix: Mistral type strictness for `usage` & update token values/windows by **@danny-avila** in [#6562](https://github.com/danny-avila/LibreChat/pull/6562)
+- 🔧 fix: Consolidate Text Parsing and TTS Edge Initialization by **@danny-avila** in [#6582](https://github.com/danny-avila/LibreChat/pull/6582)
+- 🔧 fix: Ensure continuation in image processing on base64 encoding from Blob Storage by **@danny-avila** in [#6619](https://github.com/danny-avila/LibreChat/pull/6619)
+- ✉️ fix: Fallback For User Name In Email Templates by **@danny-avila** in [#6620](https://github.com/danny-avila/LibreChat/pull/6620)
+- 🔧 fix: Azure Blob Integration and File Source References by **@rubentalstra** in [#6575](https://github.com/danny-avila/LibreChat/pull/6575)
+- 🐛 fix: Safeguard against undefined addedEndpoints by **@wipash** in [#6654](https://github.com/danny-avila/LibreChat/pull/6654)
+- 🤖 fix: Gemini 2.5 Vision Support by **@danny-avila** in [#6663](https://github.com/danny-avila/LibreChat/pull/6663)
+- 🔄 fix: Avatar & Error Handling Enhancements by **@danny-avila** in [#6687](https://github.com/danny-avila/LibreChat/pull/6687)
+- 🔧 fix: Chat Middleware, Zod Conversion, Auto-Save and S3 URL Refresh by **@danny-avila** in [#6720](https://github.com/danny-avila/LibreChat/pull/6720)
+- 🔧 fix: Agent Capability Checks & DocumentDB Compatibility for Agent Resource Removal by **@danny-avila** in [#6726](https://github.com/danny-avila/LibreChat/pull/6726)
+- 🔄 fix: Improve audio MIME type detection and handling by **@berry-13** in [#6707](https://github.com/danny-avila/LibreChat/pull/6707)
+- 🪺 fix: Update Role Handling due to New Schema Shape by **@danny-avila** in [#6774](https://github.com/danny-avila/LibreChat/pull/6774)
+- 🗨️ fix: Show ModelSpec Greeting by **@berry-13** in [#6770](https://github.com/danny-avila/LibreChat/pull/6770)
+- 🔧 fix: Keyv and Proxy Issues, and More Memory Optimizations by **@danny-avila** in [#6867](https://github.com/danny-avila/LibreChat/pull/6867)
+- ✨ fix: Implement dynamic text sizing for greeting and name display by **@berry-13** in [#6833](https://github.com/danny-avila/LibreChat/pull/6833)
+- 📝 fix: Mistral OCR Image Support and Azure Agent Titles by **@danny-avila** in [#6901](https://github.com/danny-avila/LibreChat/pull/6901)
+- 📢 fix: Invalid `engineTTS` and Conversation State on Navigation by **@berry-13** in [#6904](https://github.com/danny-avila/LibreChat/pull/6904)
+- 🛠️ fix: Improve Accessibility and Display of Conversation Menu by **@danny-avila** in [#6913](https://github.com/danny-avila/LibreChat/pull/6913)
+- 🔧 fix: Agent Resource Form, Convo Menu Style, Ensure Draft Clears on Submission by **@danny-avila** in [#6925](https://github.com/danny-avila/LibreChat/pull/6925)
+- 🔀 fix: MCP Improvements, Auto-Save Drafts, Artifact Markup by **@danny-avila** in [#7040](https://github.com/danny-avila/LibreChat/pull/7040)
+- 🐋 fix: Improve Deepseek Compatbility by **@danny-avila** in [#7132](https://github.com/danny-avila/LibreChat/pull/7132)
+- 🐙 fix: Add Redis Ping Interval to Prevent Connection Drops by **@peeeteeer** in [#7127](https://github.com/danny-avila/LibreChat/pull/7127)
+
+### ⚙️ Other Changes
+
+- 📦 refactor: Move DB Models to `@librechat/data-schemas` by **@rubentalstra** in [#6210](https://github.com/danny-avila/LibreChat/pull/6210)
+- 📦 chore: Patch `axios` to address CVE-2025-27152 by **@danny-avila** in [#6222](https://github.com/danny-avila/LibreChat/pull/6222)
+- ⚠️ refactor: Use Error Content Part Instead Of Throwing Error for Agents by **@danny-avila** in [#6262](https://github.com/danny-avila/LibreChat/pull/6262)
+- 🏃♂️ refactor: Improve Agent Run Context & Misc. Changes by **@danny-avila** in [#6448](https://github.com/danny-avila/LibreChat/pull/6448)
+- 📝 docs: librechat.example.yaml by **@ineiti** in [#6442](https://github.com/danny-avila/LibreChat/pull/6442)
+- 🏃♂️ refactor: More Agent Context Improvements during Run by **@danny-avila** in [#6477](https://github.com/danny-avila/LibreChat/pull/6477)
+- 🔃 refactor: Allow streaming for `o1` models by **@danny-avila** in [#6509](https://github.com/danny-avila/LibreChat/pull/6509)
+- 🔧 chore: `Vite` Plugin Upgrades & Config Optimizations by **@rubentalstra** in [#6547](https://github.com/danny-avila/LibreChat/pull/6547)
+- 🔧 refactor: Consolidate Logging, Model Selection & Actions Optimizations, Minor Fixes by **@danny-avila** in [#6553](https://github.com/danny-avila/LibreChat/pull/6553)
+- 🎨 style: Address Minor UI Refresh Issues by **@berry-13** in [#6552](https://github.com/danny-avila/LibreChat/pull/6552)
+- 🔧 refactor: Enhance Model & Endpoint Configurations with Global Indicators 🌍 by **@berry-13** in [#6578](https://github.com/danny-avila/LibreChat/pull/6578)
+- 💬 style: Chat UI, Greeting, and Message adjustments by **@berry-13** in [#6612](https://github.com/danny-avila/LibreChat/pull/6612)
+- ⚡ refactor: DocumentDB Compatibility for Balance Updates by **@danny-avila** in [#6673](https://github.com/danny-avila/LibreChat/pull/6673)
+- 🧹 chore: Update ESLint rules for React hooks by **@rubentalstra** in [#6685](https://github.com/danny-avila/LibreChat/pull/6685)
+- 🪙 chore: Update Gemini Pricing by **@RedwindA** in [#6731](https://github.com/danny-avila/LibreChat/pull/6731)
+- 🪺 refactor: Nest Permission fields for Roles by **@rubentalstra** in [#6487](https://github.com/danny-avila/LibreChat/pull/6487)
+- 📦 chore: Update `caniuse-lite` dependency to version 1.0.30001706 by **@rubentalstra** in [#6482](https://github.com/danny-avila/LibreChat/pull/6482)
+- ⚙️ refactor: OAuth Flow Signal, Type Safety, Tool Progress & Updated Packages by **@danny-avila** in [#6752](https://github.com/danny-avila/LibreChat/pull/6752)
+- 📦 chore: bump vite from 6.2.3 to 6.2.5 by **@dependabot[bot]** in [#6745](https://github.com/danny-avila/LibreChat/pull/6745)
+- 💾 chore: Enhance Local Storage Handling and Update MCP SDK by **@danny-avila** in [#6809](https://github.com/danny-avila/LibreChat/pull/6809)
+- 🤖 refactor: Improve Agents Memory Usage, Bump Keyv, Grok 3 by **@danny-avila** in [#6850](https://github.com/danny-avila/LibreChat/pull/6850)
+- 💾 refactor: Enhance Memory In Image Encodings & Client Disposal by **@danny-avila** in [#6852](https://github.com/danny-avila/LibreChat/pull/6852)
+- 🔁 refactor: Token Event Handler and Standardize `maxTokens` Key by **@danny-avila** in [#6886](https://github.com/danny-avila/LibreChat/pull/6886)
+- 🔍 refactor: Search & Message Retrieval by **@berry-13** in [#6903](https://github.com/danny-avila/LibreChat/pull/6903)
+- 🎨 style: standardize dropdown styling & fix z-Index layering by **@berry-13** in [#6939](https://github.com/danny-avila/LibreChat/pull/6939)
+- 📙 docs: CONTRIBUTING.md by **@dblock** in [#6831](https://github.com/danny-avila/LibreChat/pull/6831)
+- 🧭 refactor: Modernize Nav/Header by **@danny-avila** in [#7094](https://github.com/danny-avila/LibreChat/pull/7094)
+- 🪶 refactor: Chat Input Focus for Conversation Navigations & ChatForm Optimizations by **@danny-avila** in [#7100](https://github.com/danny-avila/LibreChat/pull/7100)
+- 🔃 refactor: Streamline Navigation, Message Loading UX by **@danny-avila** in [#7118](https://github.com/danny-avila/LibreChat/pull/7118)
+- 📜 docs: Unreleased changelog by **@github-actions[bot]** in [#6265](https://github.com/danny-avila/LibreChat/pull/6265)
+
+
+
+[See full release details][release-v0.7.8-rc1]
+
+[release-v0.7.8-rc1]: https://github.com/danny-avila/LibreChat/releases/tag/v0.7.8-rc1
+
+---
diff --git a/CLAUDE.md b/CLAUDE.md
deleted file mode 100644
index 81362cfc57..0000000000
--- a/CLAUDE.md
+++ /dev/null
@@ -1,172 +0,0 @@
-# LibreChat
-
-## Project Overview
-
-LibreChat is a monorepo with the following key workspaces:
-
-| Workspace | Language | Side | Dependency | Purpose |
-|---|---|---|---|---|
-| `/api` | JS (legacy) | Backend | `packages/api`, `packages/data-schemas`, `packages/data-provider`, `@librechat/agents` | Express server — minimize changes here |
-| `/packages/api` | **TypeScript** | Backend | `packages/data-schemas`, `packages/data-provider` | New backend code lives here (TS only, consumed by `/api`) |
-| `/packages/data-schemas` | TypeScript | Backend | `packages/data-provider` | Database models/schemas, shareable across backend projects |
-| `/packages/data-provider` | TypeScript | Shared | — | Shared API types, endpoints, data-service — used by both frontend and backend |
-| `/client` | TypeScript/React | Frontend | `packages/data-provider`, `packages/client` | Frontend SPA |
-| `/packages/client` | TypeScript | Frontend | `packages/data-provider` | Shared frontend utilities |
-
-The source code for `@librechat/agents` (major backend dependency, same team) is at `/home/danny/agentus`.
-
----
-
-## Workspace Boundaries
-
-- **All new backend code must be TypeScript** in `/packages/api`.
-- Keep `/api` changes to the absolute minimum (thin JS wrappers calling into `/packages/api`).
-- Database-specific shared logic goes in `/packages/data-schemas`.
-- Frontend/backend shared API logic (endpoints, types, data-service) goes in `/packages/data-provider`.
-- Build data-provider from project root: `npm run build:data-provider`.
-
----
-
-## Code Style
-
-### Naming and File Organization
-
-- **Single-word file names** whenever possible (e.g., `permissions.ts`, `capabilities.ts`, `service.ts`).
-- When multiple words are needed, prefer grouping related modules under a **single-word directory** rather than using multi-word file names (e.g., `admin/capabilities.ts` not `adminCapabilities.ts`).
-- The directory already provides context — `app/service.ts` not `app/appConfigService.ts`.
-
-### Structure and Clarity
-
-- **Never-nesting**: early returns, flat code, minimal indentation. Break complex operations into well-named helpers.
-- **Functional first**: pure functions, immutable data, `map`/`filter`/`reduce` over imperative loops. Only reach for OOP when it clearly improves domain modeling or state encapsulation.
-- **No dynamic imports** unless absolutely necessary.
-
-### DRY
-
-- Extract repeated logic into utility functions.
-- Reusable hooks / higher-order components for UI patterns.
-- Parameterized helpers instead of near-duplicate functions.
-- Constants for repeated values; configuration objects over duplicated init code.
-- Shared validators, centralized error handling, single source of truth for business rules.
-- Shared typing system with interfaces/types extending common base definitions.
-- Abstraction layers for external API interactions.
-
-### Iteration and Performance
-
-- **Minimize looping** — especially over shared data structures like message arrays, which are iterated frequently throughout the codebase. Every additional pass adds up at scale.
-- Consolidate sequential O(n) operations into a single pass whenever possible; never loop over the same collection twice if the work can be combined.
-- Choose data structures that reduce the need to iterate (e.g., `Map`/`Set` for lookups instead of `Array.find`/`Array.includes`).
-- Avoid unnecessary object creation; consider space-time tradeoffs.
-- Prevent memory leaks: careful with closures, dispose resources/event listeners, no circular references.
-
-### Type Safety
-
-- **Never use `any`**. Explicit types for all parameters, return values, and variables.
-- **Limit `unknown`** — avoid `unknown`, `Record`, and `as unknown as T` assertions. A `Record` almost always signals a missing explicit type definition.
-- **Don't duplicate types** — before defining a new type, check whether it already exists in the project (especially `packages/data-provider`). Reuse and extend existing types rather than creating redundant definitions.
-- Use union types, generics, and interfaces appropriately.
-- All TypeScript and ESLint warnings/errors must be addressed — do not leave unresolved diagnostics.
-
-### Comments and Documentation
-
-- Write self-documenting code; no inline comments narrating what code does.
-- JSDoc only for complex/non-obvious logic or intellisense on public APIs.
-- Single-line JSDoc for brief docs, multi-line for complex cases.
-- Avoid standalone `//` comments unless absolutely necessary.
-
-### Import Order
-
-Imports are organized into three sections:
-
-1. **Package imports** — sorted shortest to longest line length (`react` always first).
-2. **`import type` imports** — sorted longest to shortest (package types first, then local types; length resets between sub-groups).
-3. **Local/project imports** — sorted longest to shortest.
-
-Multi-line imports count total character length across all lines. Consolidate value imports from the same module. Always use standalone `import type { ... }` — never inline `type` inside value imports.
-
-### JS/TS Loop Preferences
-
-- **Limit looping as much as possible.** Prefer single-pass transformations and avoid re-iterating the same data.
-- `for (let i = 0; ...)` for performance-critical or index-dependent operations.
-- `for...of` for simple array iteration.
-- `for...in` only for object property enumeration.
-
----
-
-## Frontend Rules (`client/src/**/*`)
-
-### Localization
-
-- All user-facing text must use `useLocalize()`.
-- Only update English keys in `client/src/locales/en/translation.json` (other languages are automated externally).
-- Semantic key prefixes: `com_ui_`, `com_assistants_`, etc.
-
-### Components
-
-- TypeScript for all React components with proper type imports.
-- Semantic HTML with ARIA labels (`role`, `aria-label`) for accessibility.
-- Group related components in feature directories (e.g., `SidePanel/Memories/`).
-- Use index files for clean exports.
-
-### Data Management
-
-- Feature hooks: `client/src/data-provider/[Feature]/queries.ts` → `[Feature]/index.ts` → `client/src/data-provider/index.ts`.
-- React Query (`@tanstack/react-query`) for all API interactions; proper query invalidation on mutations.
-- QueryKeys and MutationKeys in `packages/data-provider/src/keys.ts`.
-
-### Data-Provider Integration
-
-- Endpoints: `packages/data-provider/src/api-endpoints.ts`
-- Data service: `packages/data-provider/src/data-service.ts`
-- Types: `packages/data-provider/src/types/queries.ts`
-- Use `encodeURIComponent` for dynamic URL parameters.
-
-### Performance
-
-- Prioritize memory and speed efficiency at scale.
-- Cursor pagination for large datasets.
-- Proper dependency arrays to avoid unnecessary re-renders.
-- Leverage React Query caching and background refetching.
-
----
-
-## Development Commands
-
-| Command | Purpose |
-|---|---|
-| `npm run smart-reinstall` | Install deps (if lockfile changed) + build via Turborepo |
-| `npm run reinstall` | Clean install — wipe `node_modules` and reinstall from scratch |
-| `npm run backend` | Start the backend server |
-| `npm run backend:dev` | Start backend with file watching (development) |
-| `npm run build` | Build all compiled code via Turborepo (parallel, cached) |
-| `npm run frontend` | Build all compiled code sequentially (legacy fallback) |
-| `npm run frontend:dev` | Start frontend dev server with HMR (port 3090, requires backend running) |
-| `npm run build:data-provider` | Rebuild `packages/data-provider` after changes |
-
-- Node.js: v20.19.0+ or ^22.12.0 or >= 23.0.0
-- Database: MongoDB
-- Backend runs on `http://localhost:3080/`; frontend dev server on `http://localhost:3090/`
-
----
-
-## Testing
-
-- Framework: **Jest**, run per-workspace.
-- Run tests from their workspace directory: `cd api && npx jest `, `cd packages/api && npx jest `, etc.
-- Frontend tests: `__tests__` directories alongside components; use `test/layout-test-utils` for rendering.
-- Cover loading, success, and error states for UI/data flows.
-
-### Philosophy
-
-- **Real logic over mocks.** Exercise actual code paths with real dependencies. Mocking is a last resort.
-- **Spies over mocks.** Assert that real functions are called with expected arguments and frequency without replacing underlying logic.
-- **MongoDB**: use `mongodb-memory-server` for a real in-memory MongoDB instance. Test actual queries and schema validation, not mocked DB calls.
-- **MCP**: use real `@modelcontextprotocol/sdk` exports for servers, transports, and tool definitions. Mirror real scenarios, don't stub SDK internals.
-- Only mock what you cannot control: external HTTP APIs, rate-limited services, non-deterministic system calls.
-- Heavy mocking is a code smell, not a testing strategy.
-
----
-
-## Formatting
-
-Fix all formatting lint errors (trailing spaces, tabs, newlines, indentation) using auto-fix when available. All TypeScript/ESLint warnings and errors **must** be resolved.
diff --git a/Dockerfile b/Dockerfile
index 19d275eb31..d45844c4a6 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,9 +1,9 @@
-# v0.8.4
+# v0.8.3-rc1
# Base node image
FROM node:20-alpine AS node
-RUN apk upgrade --no-cache
+# Install jemalloc
RUN apk add --no-cache jemalloc
RUN apk add --no-cache python3 py3-pip uv
diff --git a/Dockerfile.multi b/Dockerfile.multi
index bf5570f386..5a610725d5 100644
--- a/Dockerfile.multi
+++ b/Dockerfile.multi
@@ -1,12 +1,12 @@
# Dockerfile.multi
-# v0.8.4
+# v0.8.3-rc1
# Set configurable max-old-space-size with default
ARG NODE_MAX_OLD_SPACE_SIZE=6144
# Base for all builds
FROM node:20-alpine AS base-min
-RUN apk upgrade --no-cache
+# Install jemalloc
RUN apk add --no-cache jemalloc
# Set environment variable to use jemalloc
ENV LD_PRELOAD=/usr/lib/libjemalloc.so.2
diff --git a/README.md b/README.md
index a7f68d9a92..6e04396637 100644
--- a/README.md
+++ b/README.md
@@ -7,11 +7,6 @@
-
- English ·
- 中文
-
-
-
-
+
+
diff --git a/README.zh.md b/README.zh.md
deleted file mode 100644
index 7f74057413..0000000000
--- a/README.zh.md
+++ /dev/null
@@ -1,227 +0,0 @@
-
-
-
-
-
-
-
-
-
-
- English ·
- 中文
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-# ✨ 功能
-
-- 🖥️ **UI 与体验**:受 ChatGPT 启发,并具备更强的设计与功能。
-
-- 🤖 **AI 模型选择**:
- - Anthropic (Claude), AWS Bedrock, OpenAI, Azure OpenAI, Google, Vertex AI, OpenAI Responses API (包含 Azure)
- - [自定义端点 (Custom Endpoints)](https://www.librechat.ai/docs/quick_start/custom_endpoints):LibreChat 支持任何兼容 OpenAI 规范的 API,无需代理。
- - 兼容[本地与远程 AI 服务商](https://www.librechat.ai/docs/configuration/librechat_yaml/ai_endpoints):
- - Ollama, groq, Cohere, Mistral AI, Apple MLX, koboldcpp, together.ai,
- - OpenRouter, Helicone, Perplexity, ShuttleAI, Deepseek, Qwen 等。
-
-- 🔧 **[代码解释器 (Code Interpreter) API](https://www.librechat.ai/docs/features/code_interpreter)**:
- - 安全的沙箱执行环境,支持 Python, Node.js (JS/TS), Go, C/C++, Java, PHP, Rust 和 Fortran。
- - 无缝文件处理:直接上传、处理并下载文件。
- - 隐私无忧:完全隔离且安全的执行环境。
-
-- 🔦 **智能体与工具集成**:
- - **[LibreChat 智能体 (Agents)](https://www.librechat.ai/docs/features/agents)**:
- - 无代码定制助手:无需编程即可构建专业化的 AI 驱动助手。
- - 智能体市场:发现并部署社区构建的智能体。
- - 协作共享:与特定用户和群组共享智能体。
- - 灵活且可扩展:支持 MCP 服务器、工具、文件搜索、代码执行等。
- - 兼容自定义端点、OpenAI, Azure, Anthropic, AWS Bedrock, Google, Vertex AI, Responses API 等。
- - [支持模型上下文协议 (MCP)](https://modelcontextprotocol.io/clients#librechat) 用于工具调用。
-
-- 🔍 **网页搜索**:
- - 搜索互联网并检索相关信息以增强 AI 上下文。
- - 结合搜索提供商、内容爬虫和结果重排序,确保最佳检索效果。
- - **可定制 Jina 重排序**:配置自定义 Jina API URL 用于重排序服务。
- - **[了解更多 →](https://www.librechat.ai/docs/features/web_search)**
-
-- 🪄 **支持代码 Artifacts 的生成式 UI**:
- - [代码 Artifacts](https://youtu.be/GfTj7O4gmd0?si=WJbdnemZpJzBrJo3) 允许在对话中直接创建 React 组件、HTML 页面和 Mermaid 图表。
-
-- 🎨 **图像生成与编辑**:
- - 使用 [GPT-Image-1](https://www.librechat.ai/docs/features/image_gen#1--openai-image-tools-recommended) 进行文生图与图生图。
- - 支持 [DALL-E (3/2)](https://www.librechat.ai/docs/features/image_gen#2--dalle-legacy), [Stable Diffusion](https://www.librechat.ai/docs/features/image_gen#3--stable-diffusion-local), [Flux](https://www.librechat.ai/docs/features/image_gen#4--flux) 或任何 [MCP 服务器](https://www.librechat.ai/docs/features/image_gen#5--model-context-protocol-mcp)。
- - 根据提示词生成惊艳的视觉效果,或通过指令精修现有图像。
-
-- 💾 **预设与上下文管理**:
- - 创建、保存并分享自定义预设。
- - 在对话中随时切换 AI 端点和预设。
- - 编辑、重新提交并通过对话分支继续消息。
- - 创建并与特定用户和群组共享提示词。
- - [消息与对话分叉 (Fork)](https://www.librechat.ai/docs/features/fork) 以实现高级上下文控制。
-
-- 💬 **多模态与文件交互**:
- - 使用 Claude 3, GPT-4.5, GPT-4o, o1, Llama-Vision 和 Gemini 上传并分析图像 📸。
- - 支持通过自定义端点、OpenAI, Azure, Anthropic, AWS Bedrock 和 Google 进行文件对话 🗃️。
-
-- 🌎 **多语言 UI**:
- - English, 中文 (简体), 中文 (繁體), العربية, Deutsch, Español, Français, Italiano
- - Polski, Português (PT), Português (BR), Русский, 日本語, Svenska, 한국어, Tiếng Việt
- - Türkçe, Nederlands, עברית, Català, Čeština, Dansk, Eesti, فارسی
- - Suomi, Magyar, Հայերեն, Bahasa Indonesia, ქართული, Latviešu, ไทย, ئۇيغۇرچە
-
-- 🧠 **推理 UI**:
- - 针对 DeepSeek-R1 等思维链/推理 AI 模型的动态推理 UI。
-
-- 🎨 **可定制界面**:
- - 可定制的下拉菜单和界面,同时适配高级用户和初学者。
-
-- 🌊 **[可恢复流 (Resumable Streams)](https://www.librechat.ai/docs/features/resumable_streams)**:
- - 永不丢失响应:AI 响应在连接中断后自动重连并继续。
- - 多标签页与多设备同步:在多个标签页打开同一对话,或在另一设备上继续。
- - 生产级可靠性:支持从单机部署到基于 Redis 的水平扩展。
-
-- 🗣️ **语音与音频**:
- - 通过语音转文字和文字转语音实现免提对话。
- - 自动发送并播放音频。
- - 支持 OpenAI, Azure OpenAI 和 Elevenlabs。
-
-- 📥 **导入与导出对话**:
- - 从 LibreChat, ChatGPT, Chatbot UI 导入对话。
- - 将对话导出为截图、Markdown、文本、JSON。
-
-- 🔍 **搜索与发现**:
- - 搜索所有消息和对话。
-
-- 👥 **多用户与安全访问**:
- - 支持 OAuth2, LDAP 和电子邮件登录的多用户安全认证。
- - 内置审核系统和 Token 消耗管理工具。
-
-- ⚙️ **配置与部署**:
- - 支持代理、反向代理、Docker 及多种部署选项。
- - 可完全本地运行或部署在云端。
-
-- 📖 **开源与社区**:
- - 完全开源且在公众监督下开发。
- - 社区驱动的开发、支持与反馈。
-
-[查看我们的文档了解更多功能详情](https://docs.librechat.ai/) 📚
-
-## 🪶 LibreChat:全方位的 AI 对话平台
-
-LibreChat 是一个自托管的 AI 对话平台,在一个注重隐私的统一界面中整合了所有主流 AI 服务商。
-
-除了对话功能外,LibreChat 还提供 AI 智能体、模型上下文协议 (MCP) 支持、Artifacts、代码解释器、自定义操作、对话搜索,以及企业级多用户认证。
-
-开源、活跃开发中,专为重视 AI 基础设施自主可控的用户而构建。
-
----
-
-## 🌐 资源
-
-**GitHub 仓库:**
- - **RAG API:** [github.com/danny-avila/rag_api](https://github.com/danny-avila/rag_api)
- - **网站:** [github.com/LibreChat-AI/librechat.ai](https://github.com/LibreChat-AI/librechat.ai)
-
-**其他:**
- - **官方网站:** [librechat.ai](https://librechat.ai)
- - **帮助文档:** [librechat.ai/docs](https://librechat.ai/docs)
- - **博客:** [librechat.ai/blog](https://librechat.ai/blog)
-
----
-
-## 📝 更新日志
-
-访问发布页面和更新日志以了解最新动态:
-- [发布页面 (Releases)](https://github.com/danny-avila/LibreChat/releases)
-- [更新日志 (Changelog)](https://www.librechat.ai/changelog)
-
-**⚠️ 在更新前请务必查看[更新日志](https://www.librechat.ai/changelog)以了解破坏性更改。**
-
----
-
-## ⭐ Star 历史
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
----
-
-## ✨ 贡献
-
-欢迎任何形式的贡献、建议、错误报告和修复!
-
-对于新功能、组件或扩展,请在发送 PR 前开启 issue 进行讨论。
-
-如果您想帮助我们将 LibreChat 翻译成您的母语,我们非常欢迎!改进翻译不仅能让全球用户更轻松地使用 LibreChat,还能提升整体用户体验。请查看我们的[翻译指南](https://www.librechat.ai/docs/translation)。
-
----
-
-## 💖 感谢所有贡献者
-
-
-
-
-
----
-
-## 🎉 特别鸣谢
-
-感谢 [Locize](https://locize.com) 提供的翻译管理工具,支持 LibreChat 的多语言功能。
-
-
-
-
-
-
diff --git a/api/app/clients/BaseClient.js b/api/app/clients/BaseClient.js
index 905cadfd23..a2dfaf9907 100644
--- a/api/app/clients/BaseClient.js
+++ b/api/app/clients/BaseClient.js
@@ -3,9 +3,7 @@ const fetch = require('node-fetch');
const { logger } = require('@librechat/data-schemas');
const {
countTokens,
- checkBalance,
getBalanceConfig,
- buildMessageFiles,
extractFileContext,
encodeAndFormatAudios,
encodeAndFormatVideos,
@@ -13,27 +11,34 @@ const {
} = require('@librechat/api');
const {
Constants,
+ ErrorTypes,
FileSources,
ContentTypes,
excludedKeys,
EModelEndpoint,
- mergeFileConfig,
isParamEndpoint,
isAgentsEndpoint,
isEphemeralAgentId,
supportsBalanceCheck,
- isBedrockDocumentType,
- getEndpointFileConfig,
} = require('librechat-data-provider');
+const {
+ updateMessage,
+ getMessages,
+ saveMessage,
+ saveConvo,
+ getConvo,
+ getFiles,
+} = require('~/models');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
-const { logViolation } = require('~/cache');
+const { checkBalance } = require('~/models/balanceMethods');
+const { truncateToolCallOutputs } = require('./prompts');
const TextStream = require('./TextStream');
-const db = require('~/models');
class BaseClient {
constructor(apiKey, options = {}) {
this.apiKey = apiKey;
this.sender = options.sender ?? 'AI';
+ this.contextStrategy = null;
this.currentDateString = new Date().toLocaleDateString('en-us', {
year: 'numeric',
month: 'long',
@@ -73,10 +78,6 @@ class BaseClient {
this.currentMessages = [];
/** @type {import('librechat-data-provider').VisionModes | undefined} */
this.visionMode;
- /** @type {import('librechat-data-provider').FileConfig | undefined} */
- this._mergedFileConfig;
- /** @type {import('librechat-data-provider').EndpointFileConfig | undefined} */
- this._endpointFileConfig;
}
setOptions() {
@@ -121,9 +122,7 @@ class BaseClient {
* @returns {number}
*/
getTokenCountForResponse(responseMessage) {
- logger.debug('[BaseClient] `recordTokenUsage` not implemented.', {
- messageId: responseMessage?.messageId,
- });
+ logger.debug('[BaseClient] `recordTokenUsage` not implemented.', responseMessage);
}
/**
@@ -134,14 +133,12 @@ class BaseClient {
* @param {AppConfig['balance']} [balance]
* @param {number} promptTokens
* @param {number} completionTokens
- * @param {string} [messageId]
* @returns {Promise}
*/
- async recordTokenUsage({ model, balance, promptTokens, completionTokens, messageId }) {
+ async recordTokenUsage({ model, balance, promptTokens, completionTokens }) {
logger.debug('[BaseClient] `recordTokenUsage` not implemented.', {
model,
balance,
- messageId,
promptTokens,
completionTokens,
});
@@ -336,6 +333,45 @@ class BaseClient {
return payload;
}
+ async handleTokenCountMap(tokenCountMap) {
+ if (this.clientName === EModelEndpoint.agents) {
+ return;
+ }
+ if (this.currentMessages.length === 0) {
+ return;
+ }
+
+ for (let i = 0; i < this.currentMessages.length; i++) {
+ // Skip the last message, which is the user message.
+ if (i === this.currentMessages.length - 1) {
+ break;
+ }
+
+ const message = this.currentMessages[i];
+ const { messageId } = message;
+ const update = {};
+
+ if (messageId === tokenCountMap.summaryMessage?.messageId) {
+ logger.debug(`[BaseClient] Adding summary props to ${messageId}.`);
+
+ update.summary = tokenCountMap.summaryMessage.content;
+ update.summaryTokenCount = tokenCountMap.summaryMessage.tokenCount;
+ }
+
+ if (message.tokenCount && !update.summaryTokenCount) {
+ logger.debug(`[BaseClient] Skipping ${messageId}: already had a token count.`);
+ continue;
+ }
+
+ const tokenCount = tokenCountMap[messageId];
+ if (tokenCount) {
+ message.tokenCount = tokenCount;
+ update.tokenCount = tokenCount;
+ await this.updateMessageInDatabase({ messageId, ...update });
+ }
+ }
+ }
+
concatenateMessages(messages) {
return messages.reduce((acc, message) => {
const nameOrRole = message.name ?? message.role;
@@ -406,6 +442,154 @@ class BaseClient {
};
}
+ async handleContextStrategy({
+ instructions,
+ orderedMessages,
+ formattedMessages,
+ buildTokenMap = true,
+ }) {
+ let _instructions;
+ let tokenCount;
+
+ if (instructions) {
+ ({ tokenCount, ..._instructions } = instructions);
+ }
+
+ _instructions && logger.debug('[BaseClient] instructions tokenCount: ' + tokenCount);
+ if (tokenCount && tokenCount > this.maxContextTokens) {
+ const info = `${tokenCount} / ${this.maxContextTokens}`;
+ const errorMessage = `{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${info}" }`;
+ logger.warn(`Instructions token count exceeds max token count (${info}).`);
+ throw new Error(errorMessage);
+ }
+
+ if (this.clientName === EModelEndpoint.agents) {
+ const { dbMessages, editedIndices } = truncateToolCallOutputs(
+ orderedMessages,
+ this.maxContextTokens,
+ this.getTokenCountForMessage.bind(this),
+ );
+
+ if (editedIndices.length > 0) {
+ logger.debug('[BaseClient] Truncated tool call outputs:', editedIndices);
+ for (const index of editedIndices) {
+ formattedMessages[index].content = dbMessages[index].content;
+ }
+ orderedMessages = dbMessages;
+ }
+ }
+
+ let orderedWithInstructions = this.addInstructions(orderedMessages, instructions);
+
+ let { context, remainingContextTokens, messagesToRefine } =
+ await this.getMessagesWithinTokenLimit({
+ messages: orderedWithInstructions,
+ instructions,
+ });
+
+ logger.debug('[BaseClient] Context Count (1/2)', {
+ remainingContextTokens,
+ maxContextTokens: this.maxContextTokens,
+ });
+
+ let summaryMessage;
+ let summaryTokenCount;
+ let { shouldSummarize } = this;
+
+ // Calculate the difference in length to determine how many messages were discarded if any
+ let payload;
+ let { length } = formattedMessages;
+ length += instructions != null ? 1 : 0;
+ const diff = length - context.length;
+ const firstMessage = orderedWithInstructions[0];
+ const usePrevSummary =
+ shouldSummarize &&
+ diff === 1 &&
+ firstMessage?.summary &&
+ this.previous_summary.messageId === firstMessage.messageId;
+
+ if (diff > 0) {
+ payload = formattedMessages.slice(diff);
+ logger.debug(
+ `[BaseClient] Difference between original payload (${length}) and context (${context.length}): ${diff}`,
+ );
+ }
+
+ payload = this.addInstructions(payload ?? formattedMessages, _instructions);
+
+ const latestMessage = orderedWithInstructions[orderedWithInstructions.length - 1];
+ if (payload.length === 0 && !shouldSummarize && latestMessage) {
+ const info = `${latestMessage.tokenCount} / ${this.maxContextTokens}`;
+ const errorMessage = `{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${info}" }`;
+ logger.warn(`Prompt token count exceeds max token count (${info}).`);
+ throw new Error(errorMessage);
+ } else if (
+ _instructions &&
+ payload.length === 1 &&
+ payload[0].content === _instructions.content
+ ) {
+ const info = `${tokenCount + 3} / ${this.maxContextTokens}`;
+ const errorMessage = `{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${info}" }`;
+ logger.warn(
+ `Including instructions, the prompt token count exceeds remaining max token count (${info}).`,
+ );
+ throw new Error(errorMessage);
+ }
+
+ if (usePrevSummary) {
+ summaryMessage = { role: 'system', content: firstMessage.summary };
+ summaryTokenCount = firstMessage.summaryTokenCount;
+ payload.unshift(summaryMessage);
+ remainingContextTokens -= summaryTokenCount;
+ } else if (shouldSummarize && messagesToRefine.length > 0) {
+ ({ summaryMessage, summaryTokenCount } = await this.summarizeMessages({
+ messagesToRefine,
+ remainingContextTokens,
+ }));
+ summaryMessage && payload.unshift(summaryMessage);
+ remainingContextTokens -= summaryTokenCount;
+ }
+
+ // Make sure to only continue summarization logic if the summary message was generated
+ shouldSummarize = summaryMessage != null && shouldSummarize === true;
+
+ logger.debug('[BaseClient] Context Count (2/2)', {
+ remainingContextTokens,
+ maxContextTokens: this.maxContextTokens,
+ });
+
+ /** @type {Record | undefined} */
+ let tokenCountMap;
+ if (buildTokenMap) {
+ const currentPayload = shouldSummarize ? orderedWithInstructions : context;
+ tokenCountMap = currentPayload.reduce((map, message, index) => {
+ const { messageId } = message;
+ if (!messageId) {
+ return map;
+ }
+
+ if (shouldSummarize && index === messagesToRefine.length - 1 && !usePrevSummary) {
+ map.summaryMessage = { ...summaryMessage, messageId, tokenCount: summaryTokenCount };
+ }
+
+ map[messageId] = currentPayload[index].tokenCount;
+ return map;
+ }, {});
+ }
+
+ const promptTokens = this.maxContextTokens - remainingContextTokens;
+
+ logger.debug('[BaseClient] tokenCountMap:', tokenCountMap);
+ logger.debug('[BaseClient]', {
+ promptTokens,
+ remainingContextTokens,
+ payloadSize: payload.length,
+ maxContextTokens: this.maxContextTokens,
+ });
+
+ return { payload, tokenCountMap, promptTokens, messages: orderedWithInstructions };
+ }
+
async sendMessage(message, opts = {}) {
const appConfig = this.options.req?.config;
/** @type {Promise} */
@@ -474,30 +658,18 @@ class BaseClient {
opts,
);
- if (tokenCountMap && tokenCountMap[userMessage.messageId]) {
- userMessage.tokenCount = tokenCountMap[userMessage.messageId];
- logger.debug('[BaseClient] userMessage', {
- messageId: userMessage.messageId,
- tokenCount: userMessage.tokenCount,
- conversationId: userMessage.conversationId,
- });
+ if (tokenCountMap) {
+ logger.debug('[BaseClient] tokenCountMap', tokenCountMap);
+ if (tokenCountMap[userMessage.messageId]) {
+ userMessage.tokenCount = tokenCountMap[userMessage.messageId];
+ logger.debug('[BaseClient] userMessage', userMessage);
+ }
+
+ this.handleTokenCountMap(tokenCountMap);
}
if (!isEdited && !this.skipSaveUserMessage) {
- const reqFiles = this.options.req?.body?.files;
- if (reqFiles && Array.isArray(this.options.attachments)) {
- const files = buildMessageFiles(reqFiles, this.options.attachments);
- if (files.length > 0) {
- userMessage.files = files;
- }
- delete userMessage.image_urls;
- }
- userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user).catch(
- (err) => {
- logger.error('[BaseClient] Failed to save user message:', err);
- return {};
- },
- );
+ userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user);
this.savedMessageIds.add(userMessage.messageId);
if (typeof opts?.getReqData === 'function') {
opts.getReqData({
@@ -511,28 +683,18 @@ class BaseClient {
balanceConfig?.enabled &&
supportsBalanceCheck[this.options.endpointType ?? this.options.endpoint]
) {
- await checkBalance(
- {
- req: this.options.req,
- res: this.options.res,
- txData: {
- user: this.user,
- tokenType: 'prompt',
- amount: promptTokens,
- endpoint: this.options.endpoint,
- model: this.modelOptions?.model ?? this.model,
- endpointTokenConfig: this.options.endpointTokenConfig,
- },
+ await checkBalance({
+ req: this.options.req,
+ res: this.options.res,
+ txData: {
+ user: this.user,
+ tokenType: 'prompt',
+ amount: promptTokens,
+ endpoint: this.options.endpoint,
+ model: this.modelOptions?.model ?? this.model,
+ endpointTokenConfig: this.options.endpointTokenConfig,
},
- {
- logViolation,
- getMultiplier: db.getMultiplier,
- findBalanceByUser: db.findBalanceByUser,
- createAutoRefillTransaction: db.createAutoRefillTransaction,
- balanceConfig,
- upsertBalanceFields: db.upsertBalanceFields,
- },
- );
+ });
}
const { completion, metadata } = await this.sendCompletion(payload, opts);
@@ -585,7 +747,12 @@ class BaseClient {
responseMessage.text = completion.join('');
}
- if (tokenCountMap && this.recordTokenUsage && this.getTokenCountForResponse) {
+ if (
+ tokenCountMap &&
+ this.recordTokenUsage &&
+ this.getTokenCountForResponse &&
+ this.getTokenCount
+ ) {
let completionTokens;
/**
@@ -598,6 +765,13 @@ class BaseClient {
if (usage != null && Number(usage[this.outputTokensKey]) > 0) {
responseMessage.tokenCount = usage[this.outputTokensKey];
completionTokens = responseMessage.tokenCount;
+ await this.updateUserMessageTokenCount({
+ usage,
+ tokenCountMap,
+ userMessage,
+ userMessagePromise,
+ opts,
+ });
} else {
responseMessage.tokenCount = this.getTokenCountForResponse(responseMessage);
completionTokens = responseMessage.tokenCount;
@@ -606,45 +780,15 @@ class BaseClient {
promptTokens,
completionTokens,
balance: balanceConfig,
- /** Note: When using agents, responseMessage.model is the agent ID, not the model */
- model: this.model,
- messageId: this.responseMessageId,
+ model: responseMessage.model,
});
}
-
- logger.debug('[BaseClient] Response token usage', {
- messageId: responseMessage.messageId,
- model: responseMessage.model,
- promptTokens,
- completionTokens,
- });
}
if (userMessagePromise) {
await userMessagePromise;
}
- if (
- this.contextMeta?.calibrationRatio > 0 &&
- this.contextMeta.calibrationRatio !== 1 &&
- userMessage.tokenCount > 0
- ) {
- const calibrated = Math.round(userMessage.tokenCount * this.contextMeta.calibrationRatio);
- if (calibrated !== userMessage.tokenCount) {
- logger.debug('[BaseClient] Calibrated user message tokenCount', {
- messageId: userMessage.messageId,
- raw: userMessage.tokenCount,
- calibrated,
- ratio: this.contextMeta.calibrationRatio,
- });
- userMessage.tokenCount = calibrated;
- await this.updateMessageInDatabase({
- messageId: userMessage.messageId,
- tokenCount: calibrated,
- });
- }
- }
-
if (this.artifactPromises) {
responseMessage.attachments = (await Promise.all(this.artifactPromises)).filter((a) => a);
}
@@ -657,10 +801,6 @@ class BaseClient {
}
}
- if (this.contextMeta) {
- responseMessage.contextMeta = this.contextMeta;
- }
-
responseMessage.databasePromise = this.saveMessageToDatabase(
responseMessage,
saveOptions,
@@ -671,10 +811,79 @@ class BaseClient {
return responseMessage;
}
+ /**
+ * Stream usage should only be used for user message token count re-calculation if:
+ * - The stream usage is available, with input tokens greater than 0,
+ * - the client provides a function to calculate the current token count,
+ * - files are being resent with every message (default behavior; or if `false`, with no attachments),
+ * - the `promptPrefix` (custom instructions) is not set.
+ *
+ * In these cases, the legacy token estimations would be more accurate.
+ *
+ * TODO: included system messages in the `orderedMessages` accounting, potentially as a
+ * separate message in the UI. ChatGPT does this through "hidden" system messages.
+ * @param {object} params
+ * @param {StreamUsage} params.usage
+ * @param {Record} params.tokenCountMap
+ * @param {TMessage} params.userMessage
+ * @param {Promise} params.userMessagePromise
+ * @param {object} params.opts
+ */
+ async updateUserMessageTokenCount({
+ usage,
+ tokenCountMap,
+ userMessage,
+ userMessagePromise,
+ opts,
+ }) {
+ /** @type {boolean} */
+ const shouldUpdateCount =
+ this.calculateCurrentTokenCount != null &&
+ Number(usage[this.inputTokensKey]) > 0 &&
+ (this.options.resendFiles ||
+ (!this.options.resendFiles && !this.options.attachments?.length)) &&
+ !this.options.promptPrefix;
+
+ if (!shouldUpdateCount) {
+ return;
+ }
+
+ const userMessageTokenCount = this.calculateCurrentTokenCount({
+ currentMessageId: userMessage.messageId,
+ tokenCountMap,
+ usage,
+ });
+
+ if (userMessageTokenCount === userMessage.tokenCount) {
+ return;
+ }
+
+ userMessage.tokenCount = userMessageTokenCount;
+ /*
+ Note: `AgentController` saves the user message if not saved here
+ (noted by `savedMessageIds`), so we update the count of its `userMessage` reference
+ */
+ if (typeof opts?.getReqData === 'function') {
+ opts.getReqData({
+ userMessage,
+ });
+ }
+ /*
+ Note: we update the user message to be sure it gets the calculated token count;
+ though `AgentController` saves the user message if not saved here
+ (noted by `savedMessageIds`), EditController does not
+ */
+ await userMessagePromise;
+ await this.updateMessageInDatabase({
+ messageId: userMessage.messageId,
+ tokenCount: userMessageTokenCount,
+ });
+ }
+
async loadHistory(conversationId, parentMessageId = null) {
logger.debug('[BaseClient] Loading history:', { conversationId, parentMessageId });
- const messages = (await db.getMessages({ conversationId })) ?? [];
+ const messages = (await getMessages({ conversationId })) ?? [];
if (messages.length === 0) {
return [];
@@ -697,24 +906,10 @@ class BaseClient {
return _messages;
}
+ // Find the latest message with a 'summary' property
for (let i = _messages.length - 1; i >= 0; i--) {
- const msg = _messages[i];
- if (!msg) {
- continue;
- }
-
- const summaryBlock = BaseClient.findSummaryContentBlock(msg);
- if (summaryBlock) {
- this.previous_summary = {
- ...msg,
- summary: BaseClient.getSummaryText(summaryBlock),
- summaryTokenCount: summaryBlock.tokenCount,
- };
- break;
- }
-
- if (msg.summary) {
- this.previous_summary = msg;
+ if (_messages[i]?.summary) {
+ this.previous_summary = _messages[i];
break;
}
}
@@ -739,30 +934,16 @@ class BaseClient {
* @param {string | null} user
*/
async saveMessageToDatabase(message, endpointOptions, user = null) {
- // Snapshot options before any await; disposeClient may set client.options = null
- // while this method is suspended at an I/O boundary, but the local reference
- // remains valid (disposeClient nulls the property, not the object itself).
- const options = this.options;
- if (!options) {
- logger.error('[BaseClient] saveMessageToDatabase: client disposed before save, skipping');
- return {};
- }
-
if (this.user && user !== this.user) {
throw new Error('User mismatch.');
}
- const hasAddedConvo = options?.req?.body?.addedConvo != null;
- const reqCtx = {
- userId: options?.req?.user?.id,
- isTemporary: options?.req?.body?.isTemporary,
- interfaceConfig: options?.req?.config?.interfaceConfig,
- };
- const savedMessage = await db.saveMessage(
- reqCtx,
+ const hasAddedConvo = this.options?.req?.body?.addedConvo != null;
+ const savedMessage = await saveMessage(
+ this.options?.req,
{
...message,
- endpoint: options.endpoint,
+ endpoint: this.options.endpoint,
unfinished: false,
user,
...(hasAddedConvo && { addedConvo: true }),
@@ -776,20 +957,20 @@ class BaseClient {
const fieldsToKeep = {
conversationId: message.conversationId,
- endpoint: options.endpoint,
- endpointType: options.endpointType,
+ endpoint: this.options.endpoint,
+ endpointType: this.options.endpointType,
...endpointOptions,
};
const existingConvo =
this.fetchedConvo === true
? null
- : await db.getConvo(options?.req?.user?.id, message.conversationId);
+ : await getConvo(this.options?.req?.user?.id, message.conversationId);
const unsetFields = {};
const exceptions = new Set(['spec', 'iconURL']);
const hasNonEphemeralAgent =
- isAgentsEndpoint(options.endpoint) &&
+ isAgentsEndpoint(this.options.endpoint) &&
endpointOptions?.agent_id &&
!isEphemeralAgentId(endpointOptions.agent_id);
if (hasNonEphemeralAgent) {
@@ -811,7 +992,7 @@ class BaseClient {
}
}
- const conversation = await db.saveConvo(reqCtx, fieldsToKeep, {
+ const conversation = await saveConvo(this.options?.req, fieldsToKeep, {
context: 'api/app/clients/BaseClient.js - saveMessageToDatabase #saveConvo',
unsetFields,
});
@@ -824,35 +1005,7 @@ class BaseClient {
* @param {Partial} message
*/
async updateMessageInDatabase(message) {
- await db.updateMessage(this.options?.req?.user?.id, message);
- }
-
- /** Extracts text from a summary block (handles both legacy `text` field and new `content` array format). */
- static getSummaryText(summaryBlock) {
- if (Array.isArray(summaryBlock.content)) {
- return summaryBlock.content.map((b) => b.text ?? '').join('');
- }
- if (typeof summaryBlock.content === 'string') {
- return summaryBlock.content;
- }
- return summaryBlock.text ?? '';
- }
-
- /** Finds the last summary content block in a message's content array (last-summary-wins). */
- static findSummaryContentBlock(message) {
- if (!Array.isArray(message?.content)) {
- return null;
- }
- let lastSummary = null;
- for (const part of message.content) {
- if (
- part?.type === ContentTypes.SUMMARY &&
- BaseClient.getSummaryText(part).trim().length > 0
- ) {
- lastSummary = part;
- }
- }
- return lastSummary;
+ await updateMessage(this.options.req, message);
}
/**
@@ -909,35 +1062,20 @@ class BaseClient {
break;
}
- let resolved = message;
- let hasSummary = false;
- if (summary) {
- const summaryBlock = BaseClient.findSummaryContentBlock(message);
- if (summaryBlock) {
- const summaryText = BaseClient.getSummaryText(summaryBlock);
- resolved = {
- ...message,
- role: 'system',
- content: [{ type: ContentTypes.TEXT, text: summaryText }],
- tokenCount: summaryBlock.tokenCount,
- };
- hasSummary = true;
- } else if (message.summary) {
- resolved = {
- ...message,
- role: 'system',
- content: [{ type: ContentTypes.TEXT, text: message.summary }],
- tokenCount: message.summaryTokenCount ?? message.tokenCount,
- };
- hasSummary = true;
- }
+ if (summary && message.summary) {
+ message.role = 'system';
+ message.text = message.summary;
}
- const shouldMap = mapMethod != null && (mapCondition != null ? mapCondition(resolved) : true);
- const processedMessage = shouldMap ? mapMethod(resolved) : resolved;
+ if (summary && message.summaryTokenCount) {
+ message.tokenCount = message.summaryTokenCount;
+ }
+
+ const shouldMap = mapMethod != null && (mapCondition != null ? mapCondition(message) : true);
+ const processedMessage = shouldMap ? mapMethod(message) : message;
orderedMessages.push(processedMessage);
- if (hasSummary) {
+ if (summary && message.summary) {
break;
}
@@ -1093,7 +1231,6 @@ class BaseClient {
provider: this.options.agent?.provider ?? this.options.endpoint,
endpoint: this.options.agent?.endpoint ?? this.options.endpoint,
useResponsesApi: this.options.agent?.model_parameters?.useResponsesApi,
- model: this.modelOptions?.model ?? this.model,
},
getStrategyFunctions,
);
@@ -1163,19 +1300,6 @@ class BaseClient {
const allFiles = [];
- const provider = this.options.agent?.provider ?? this.options.endpoint;
- const isBedrock = provider === EModelEndpoint.bedrock;
-
- if (!this._mergedFileConfig && this.options.req?.config?.fileConfig) {
- this._mergedFileConfig = mergeFileConfig(this.options.req.config.fileConfig);
- const endpoint = this.options.agent?.endpoint ?? this.options.endpoint;
- this._endpointFileConfig = getEndpointFileConfig({
- fileConfig: this._mergedFileConfig,
- endpoint,
- endpointType: this.options.endpointType,
- });
- }
-
for (const file of attachments) {
/** @type {FileSources} */
const source = file.source ?? FileSources.local;
@@ -1193,23 +1317,12 @@ class BaseClient {
} else if (file.type === 'application/pdf') {
categorizedAttachments.documents.push(file);
allFiles.push(file);
- } else if (isBedrock && isBedrockDocumentType(file.type)) {
- categorizedAttachments.documents.push(file);
- allFiles.push(file);
} else if (file.type.startsWith('video/')) {
categorizedAttachments.videos.push(file);
allFiles.push(file);
} else if (file.type.startsWith('audio/')) {
categorizedAttachments.audios.push(file);
allFiles.push(file);
- } else if (
- file.type &&
- this._mergedFileConfig &&
- this._endpointFileConfig?.supportedMimeTypes &&
- this._mergedFileConfig.checkType(file.type, this._endpointFileConfig.supportedMimeTypes)
- ) {
- categorizedAttachments.documents.push(file);
- allFiles.push(file);
}
}
@@ -1286,7 +1399,7 @@ class BaseClient {
return message;
}
- const files = await db.getFiles(
+ const files = await getFiles(
{
file_id: { $in: fileIds },
},
diff --git a/api/app/clients/prompts/truncate.js b/api/app/clients/prompts/truncate.js
index e744b40daa..564b39efeb 100644
--- a/api/app/clients/prompts/truncate.js
+++ b/api/app/clients/prompts/truncate.js
@@ -37,4 +37,79 @@ function smartTruncateText(text, maxLength = MAX_CHAR) {
return text;
}
-module.exports = { truncateText, smartTruncateText };
+/**
+ * @param {TMessage[]} _messages
+ * @param {number} maxContextTokens
+ * @param {function({role: string, content: TMessageContent[]}): number} getTokenCountForMessage
+ *
+ * @returns {{
+ * dbMessages: TMessage[],
+ * editedIndices: number[]
+ * }}
+ */
+function truncateToolCallOutputs(_messages, maxContextTokens, getTokenCountForMessage) {
+ const THRESHOLD_PERCENTAGE = 0.5;
+ const targetTokenLimit = maxContextTokens * THRESHOLD_PERCENTAGE;
+
+ let currentTokenCount = 3;
+ const messages = [..._messages];
+ const processedMessages = [];
+ let currentIndex = messages.length;
+ const editedIndices = new Set();
+ while (messages.length > 0) {
+ currentIndex--;
+ const message = messages.pop();
+ currentTokenCount += message.tokenCount;
+ if (currentTokenCount < targetTokenLimit) {
+ processedMessages.push(message);
+ continue;
+ }
+
+ if (!message.content || !Array.isArray(message.content)) {
+ processedMessages.push(message);
+ continue;
+ }
+
+ const toolCallIndices = message.content
+ .map((item, index) => (item.type === 'tool_call' ? index : -1))
+ .filter((index) => index !== -1)
+ .reverse();
+
+ if (toolCallIndices.length === 0) {
+ processedMessages.push(message);
+ continue;
+ }
+
+ const newContent = [...message.content];
+
+ // Truncate all tool outputs since we're over threshold
+ for (const index of toolCallIndices) {
+ const toolCall = newContent[index].tool_call;
+ if (!toolCall || !toolCall.output) {
+ continue;
+ }
+
+ editedIndices.add(currentIndex);
+
+ newContent[index] = {
+ ...newContent[index],
+ tool_call: {
+ ...toolCall,
+ output: '[OUTPUT_OMITTED_FOR_BREVITY]',
+ },
+ };
+ }
+
+ const truncatedMessage = {
+ ...message,
+ content: newContent,
+ tokenCount: getTokenCountForMessage({ role: 'assistant', content: newContent }),
+ };
+
+ processedMessages.push(truncatedMessage);
+ }
+
+ return { dbMessages: processedMessages.reverse(), editedIndices: Array.from(editedIndices) };
+}
+
+module.exports = { truncateText, smartTruncateText, truncateToolCallOutputs };
diff --git a/api/app/clients/specs/BaseClient.test.js b/api/app/clients/specs/BaseClient.test.js
index 3ce910948c..fed80de28c 100644
--- a/api/app/clients/specs/BaseClient.test.js
+++ b/api/app/clients/specs/BaseClient.test.js
@@ -38,7 +38,7 @@ jest.mock('~/models', () => ({
updateFileUsage: jest.fn(),
}));
-const { getConvo, saveConvo, saveMessage } = require('~/models');
+const { getConvo, saveConvo } = require('~/models');
jest.mock('@librechat/agents', () => {
const actual = jest.requireActual('@librechat/agents');
@@ -355,8 +355,7 @@ describe('BaseClient', () => {
id: '3',
parentMessageId: '2',
role: 'system',
- text: 'Message 3',
- content: [{ type: 'text', text: 'Summary for Message 3' }],
+ text: 'Summary for Message 3',
summary: 'Summary for Message 3',
},
{ id: '4', parentMessageId: '3', text: 'Message 4' },
@@ -381,8 +380,7 @@ describe('BaseClient', () => {
id: '4',
parentMessageId: '3',
role: 'system',
- text: 'Message 4',
- content: [{ type: 'text', text: 'Summary for Message 4' }],
+ text: 'Summary for Message 4',
summary: 'Summary for Message 4',
},
{ id: '5', parentMessageId: '4', text: 'Message 5' },
@@ -407,123 +405,12 @@ describe('BaseClient', () => {
id: '4',
parentMessageId: '3',
role: 'system',
- text: 'Message 4',
- content: [{ type: 'text', text: 'Summary for Message 4' }],
+ text: 'Summary for Message 4',
summary: 'Summary for Message 4',
},
{ id: '5', parentMessageId: '4', text: 'Message 5' },
]);
});
-
- it('should detect summary content block and use it over legacy fields (summary mode)', () => {
- const messagesWithContentBlock = [
- { id: '3', parentMessageId: '2', text: 'Message 3' },
- {
- id: '2',
- parentMessageId: '1',
- text: 'Message 2',
- content: [
- { type: 'text', text: 'Original text' },
- { type: 'summary', text: 'Content block summary', tokenCount: 42 },
- ],
- },
- { id: '1', parentMessageId: null, text: 'Message 1' },
- ];
- const result = TestClient.constructor.getMessagesForConversation({
- messages: messagesWithContentBlock,
- parentMessageId: '3',
- summary: true,
- });
- expect(result).toHaveLength(2);
- expect(result[0].role).toBe('system');
- expect(result[0].content).toEqual([{ type: 'text', text: 'Content block summary' }]);
- expect(result[0].tokenCount).toBe(42);
- });
-
- it('should prefer content block summary over legacy summary field', () => {
- const messagesWithBoth = [
- { id: '2', parentMessageId: '1', text: 'Message 2' },
- {
- id: '1',
- parentMessageId: null,
- text: 'Message 1',
- summary: 'Legacy summary',
- summaryTokenCount: 10,
- content: [{ type: 'summary', text: 'Content block summary', tokenCount: 20 }],
- },
- ];
- const result = TestClient.constructor.getMessagesForConversation({
- messages: messagesWithBoth,
- parentMessageId: '2',
- summary: true,
- });
- expect(result).toHaveLength(2);
- expect(result[0].content).toEqual([{ type: 'text', text: 'Content block summary' }]);
- expect(result[0].tokenCount).toBe(20);
- });
-
- it('should fallback to legacy summary when no content block exists', () => {
- const messagesWithLegacy = [
- { id: '2', parentMessageId: '1', text: 'Message 2' },
- {
- id: '1',
- parentMessageId: null,
- text: 'Message 1',
- summary: 'Legacy summary only',
- summaryTokenCount: 15,
- },
- ];
- const result = TestClient.constructor.getMessagesForConversation({
- messages: messagesWithLegacy,
- parentMessageId: '2',
- summary: true,
- });
- expect(result).toHaveLength(2);
- expect(result[0].content).toEqual([{ type: 'text', text: 'Legacy summary only' }]);
- expect(result[0].tokenCount).toBe(15);
- });
- });
-
- describe('findSummaryContentBlock', () => {
- it('should find a summary block in the content array', () => {
- const message = {
- content: [
- { type: 'text', text: 'some text' },
- { type: 'summary', text: 'Summary of conversation', tokenCount: 50 },
- ],
- };
- const result = TestClient.constructor.findSummaryContentBlock(message);
- expect(result).toBeTruthy();
- expect(result.text).toBe('Summary of conversation');
- expect(result.tokenCount).toBe(50);
- });
-
- it('should return null when no summary block exists', () => {
- const message = {
- content: [
- { type: 'text', text: 'some text' },
- { type: 'tool_call', tool_call: {} },
- ],
- };
- expect(TestClient.constructor.findSummaryContentBlock(message)).toBeNull();
- });
-
- it('should return null for string content', () => {
- const message = { content: 'just a string' };
- expect(TestClient.constructor.findSummaryContentBlock(message)).toBeNull();
- });
-
- it('should return null for missing content', () => {
- expect(TestClient.constructor.findSummaryContentBlock({})).toBeNull();
- expect(TestClient.constructor.findSummaryContentBlock(null)).toBeNull();
- });
-
- it('should skip summary blocks with no text', () => {
- const message = {
- content: [{ type: 'summary', tokenCount: 10 }],
- };
- expect(TestClient.constructor.findSummaryContentBlock(message)).toBeNull();
- });
});
describe('sendMessage', () => {
@@ -906,52 +793,6 @@ describe('BaseClient', () => {
);
});
- test('saveMessageToDatabase returns early when this.options is null (client disposed)', async () => {
- const savedOptions = TestClient.options;
- TestClient.options = null;
- saveMessage.mockClear();
-
- const result = await TestClient.saveMessageToDatabase(
- { messageId: 'msg-1', conversationId: 'conv-1', isCreatedByUser: true, text: 'hi' },
- {},
- null,
- );
-
- expect(result).toEqual({});
- expect(saveMessage).not.toHaveBeenCalled();
-
- TestClient.options = savedOptions;
- });
-
- test('saveMessageToDatabase uses snapshot of options, immune to mid-await disposal', async () => {
- const savedOptions = TestClient.options;
- saveMessage.mockClear();
- saveConvo.mockClear();
-
- // Make db.saveMessage yield, simulating I/O suspension during which disposal occurs
- saveMessage.mockImplementation(async (_reqCtx, msgData) => {
- // Simulate disposeClient nullifying client.options while awaiting
- TestClient.options = null;
- return msgData;
- });
- saveConvo.mockResolvedValue({ conversationId: 'conv-1' });
-
- const result = await TestClient.saveMessageToDatabase(
- { messageId: 'msg-1', conversationId: 'conv-1', isCreatedByUser: true, text: 'hi' },
- { endpoint: 'openAI' },
- null,
- );
-
- // Should complete without TypeError, using the snapshotted options
- expect(result).toHaveProperty('message');
- expect(result).toHaveProperty('conversation');
- expect(saveMessage).toHaveBeenCalled();
-
- TestClient.options = savedOptions;
- saveMessage.mockReset();
- saveConvo.mockReset();
- });
-
test('userMessagePromise is awaited before saving response message', async () => {
// Mock the saveMessageToDatabase method
TestClient.saveMessageToDatabase = jest.fn().mockImplementation(() => {
@@ -980,56 +821,6 @@ describe('BaseClient', () => {
});
});
- describe('recordTokenUsage model assignment', () => {
- test('should pass this.model to recordTokenUsage, not the agent ID from responseMessage.model', async () => {
- const actualModel = 'claude-opus-4-5';
- const agentId = 'agent_p5Z_IU6EIxBoqn1BoqLBp';
-
- TestClient.model = actualModel;
- TestClient.options.endpoint = 'agents';
- TestClient.options.agent = { id: agentId };
-
- TestClient.getTokenCountForResponse = jest.fn().mockReturnValue(50);
- TestClient.recordTokenUsage = jest.fn().mockResolvedValue(undefined);
- TestClient.buildMessages.mockReturnValue({
- prompt: [],
- tokenCountMap: { res: 50 },
- });
-
- await TestClient.sendMessage('Hello', {});
-
- expect(TestClient.recordTokenUsage).toHaveBeenCalledWith(
- expect.objectContaining({
- model: actualModel,
- }),
- );
-
- const callArgs = TestClient.recordTokenUsage.mock.calls[0][0];
- expect(callArgs.model).not.toBe(agentId);
- });
-
- test('should pass this.model even when this.model differs from modelOptions.model', async () => {
- const instanceModel = 'gpt-4o';
- TestClient.model = instanceModel;
- TestClient.modelOptions = { model: 'gpt-4o-mini' };
-
- TestClient.getTokenCountForResponse = jest.fn().mockReturnValue(50);
- TestClient.recordTokenUsage = jest.fn().mockResolvedValue(undefined);
- TestClient.buildMessages.mockReturnValue({
- prompt: [],
- tokenCountMap: { res: 50 },
- });
-
- await TestClient.sendMessage('Hello', {});
-
- expect(TestClient.recordTokenUsage).toHaveBeenCalledWith(
- expect.objectContaining({
- model: instanceModel,
- }),
- );
- });
- });
-
describe('getMessagesWithinTokenLimit with instructions', () => {
test('should always include instructions when present', async () => {
TestClient.maxContextTokens = 50;
@@ -1137,123 +928,4 @@ describe('BaseClient', () => {
expect(result.remainingContextTokens).toBe(2); // 25 - 20 - 3(assistant label)
});
});
-
- describe('sendMessage file population', () => {
- const attachment = {
- file_id: 'file-abc',
- filename: 'image.png',
- filepath: '/uploads/image.png',
- type: 'image/png',
- bytes: 1024,
- object: 'file',
- user: 'user-1',
- embedded: false,
- usage: 0,
- text: 'large ocr blob that should be stripped',
- _id: 'mongo-id-1',
- };
-
- beforeEach(() => {
- TestClient.options.req = { body: { files: [{ file_id: 'file-abc' }] } };
- TestClient.options.attachments = [attachment];
- });
-
- test('populates userMessage.files before saveMessageToDatabase is called', async () => {
- TestClient.saveMessageToDatabase = jest.fn().mockImplementation((msg) => {
- return Promise.resolve({ message: msg });
- });
-
- await TestClient.sendMessage('Hello');
-
- const userSave = TestClient.saveMessageToDatabase.mock.calls.find(
- ([msg]) => msg.isCreatedByUser,
- );
- expect(userSave).toBeDefined();
- expect(userSave[0].files).toBeDefined();
- expect(userSave[0].files).toHaveLength(1);
- expect(userSave[0].files[0].file_id).toBe('file-abc');
- });
-
- test('strips text and _id from files before saving', async () => {
- TestClient.saveMessageToDatabase = jest.fn().mockResolvedValue({ message: {} });
-
- await TestClient.sendMessage('Hello');
-
- const userSave = TestClient.saveMessageToDatabase.mock.calls.find(
- ([msg]) => msg.isCreatedByUser,
- );
- expect(userSave[0].files[0].text).toBeUndefined();
- expect(userSave[0].files[0]._id).toBeUndefined();
- expect(userSave[0].files[0].filename).toBe('image.png');
- });
-
- test('deletes image_urls from userMessage when files are present', async () => {
- TestClient.saveMessageToDatabase = jest.fn().mockResolvedValue({ message: {} });
- TestClient.options.attachments = [
- { ...attachment, image_urls: ['data:image/png;base64,...'] },
- ];
-
- await TestClient.sendMessage('Hello');
-
- const userSave = TestClient.saveMessageToDatabase.mock.calls.find(
- ([msg]) => msg.isCreatedByUser,
- );
- expect(userSave[0].image_urls).toBeUndefined();
- });
-
- test('does not set files when no attachments match request file IDs', async () => {
- TestClient.options.req = { body: { files: [{ file_id: 'file-nomatch' }] } };
- TestClient.saveMessageToDatabase = jest.fn().mockResolvedValue({ message: {} });
-
- await TestClient.sendMessage('Hello');
-
- const userSave = TestClient.saveMessageToDatabase.mock.calls.find(
- ([msg]) => msg.isCreatedByUser,
- );
- expect(userSave[0].files).toBeUndefined();
- });
-
- test('skips file population when attachments is not an array (Promise case)', async () => {
- TestClient.options.attachments = Promise.resolve([attachment]);
- TestClient.saveMessageToDatabase = jest.fn().mockResolvedValue({ message: {} });
-
- await TestClient.sendMessage('Hello');
-
- const userSave = TestClient.saveMessageToDatabase.mock.calls.find(
- ([msg]) => msg.isCreatedByUser,
- );
- expect(userSave[0].files).toBeUndefined();
- });
-
- test('skips file population when skipSaveUserMessage is true', async () => {
- TestClient.skipSaveUserMessage = true;
- TestClient.saveMessageToDatabase = jest.fn().mockResolvedValue({ message: {} });
-
- await TestClient.sendMessage('Hello');
-
- const userSave = TestClient.saveMessageToDatabase.mock.calls.find(
- ([msg]) => msg?.isCreatedByUser,
- );
- expect(userSave).toBeUndefined();
- });
-
- test('ignores file_id: undefined entries in req.body.files (no set poisoning)', async () => {
- TestClient.options.req = {
- body: { files: [{ file_id: undefined }, { file_id: 'file-abc' }] },
- };
- TestClient.options.attachments = [
- { ...attachment, file_id: undefined },
- { ...attachment, file_id: 'file-abc' },
- ];
- TestClient.saveMessageToDatabase = jest.fn().mockResolvedValue({ message: {} });
-
- await TestClient.sendMessage('Hello');
-
- const userSave = TestClient.saveMessageToDatabase.mock.calls.find(
- ([msg]) => msg.isCreatedByUser,
- );
- expect(userSave[0].files).toHaveLength(1);
- expect(userSave[0].files[0].file_id).toBe('file-abc');
- });
- });
});
diff --git a/api/app/clients/tools/manifest.json b/api/app/clients/tools/manifest.json
index 9637c20867..7930e67ac9 100644
--- a/api/app/clients/tools/manifest.json
+++ b/api/app/clients/tools/manifest.json
@@ -16,7 +16,7 @@
"name": "Google",
"pluginKey": "google",
"description": "Use Google Search to find information about the weather, news, sports, and more.",
- "icon": "assets/google-search.svg",
+ "icon": "https://i.imgur.com/SMmVkNB.png",
"authConfig": [
{
"authField": "GOOGLE_CSE_ID",
@@ -61,7 +61,7 @@
"name": "DALL-E-3",
"pluginKey": "dalle",
"description": "[DALL-E-3] Create realistic images and art from a description in natural language",
- "icon": "assets/openai.svg",
+ "icon": "https://i.imgur.com/u2TzXzH.png",
"authConfig": [
{
"authField": "DALLE3_API_KEY||DALLE_API_KEY",
@@ -74,7 +74,7 @@
"name": "Tavily Search",
"pluginKey": "tavily_search_results_json",
"description": "Tavily Search is a robust search API tailored for LLM Agents. It seamlessly integrates with diverse data sources to ensure a superior, relevant search experience.",
- "icon": "assets/tavily.svg",
+ "icon": "https://tavily.com/favicon.ico",
"authConfig": [
{
"authField": "TAVILY_API_KEY",
@@ -87,14 +87,14 @@
"name": "Calculator",
"pluginKey": "calculator",
"description": "Perform simple and complex mathematical calculations.",
- "icon": "assets/calculator.svg",
+ "icon": "https://i.imgur.com/RHsSG5h.png",
"authConfig": []
},
{
"name": "Stable Diffusion",
"pluginKey": "stable-diffusion",
"description": "Generate photo-realistic images given any text input.",
- "icon": "assets/stability-ai.svg",
+ "icon": "https://i.imgur.com/Yr466dp.png",
"authConfig": [
{
"authField": "SD_WEBUI_URL",
@@ -107,7 +107,7 @@
"name": "Azure AI Search",
"pluginKey": "azure-ai-search",
"description": "Use Azure AI Search to find information",
- "icon": "assets/azure-ai-search.svg",
+ "icon": "https://i.imgur.com/E7crPze.png",
"authConfig": [
{
"authField": "AZURE_AI_SEARCH_SERVICE_ENDPOINT",
@@ -143,7 +143,7 @@
"name": "Flux",
"pluginKey": "flux",
"description": "Generate images using text with the Flux API.",
- "icon": "assets/bfl-ai.svg",
+ "icon": "https://blackforestlabs.ai/wp-content/uploads/2024/07/bfl_logo_retraced_blk.png",
"isAuthRequired": "true",
"authConfig": [
{
@@ -156,14 +156,14 @@
{
"name": "Gemini Image Tools",
"pluginKey": "gemini_image_gen",
+ "toolkit": true,
"description": "Generate high-quality images using Google's Gemini Image Models. Supports Gemini API or Vertex AI.",
"icon": "assets/gemini_image_gen.svg",
"authConfig": [
{
- "authField": "GEMINI_API_KEY||GOOGLE_KEY||GOOGLE_SERVICE_KEY_FILE",
- "label": "Gemini API Key (optional)",
- "description": "Your Google Gemini API Key from Google AI Studio . Leave blank to use Vertex AI with a service account (GOOGLE_SERVICE_KEY_FILE or api/data/auth.json).",
- "optional": true
+ "authField": "GEMINI_API_KEY||GOOGLE_KEY||GEMINI_VERTEX_ENABLED",
+ "label": "Gemini API Key (Optional if Vertex AI is configured)",
+      "description": "Your Google Gemini API Key from Google AI Studio. Leave blank if using Vertex AI with service account."
}
]
}
diff --git a/api/app/clients/tools/structured/DALLE3.js b/api/app/clients/tools/structured/DALLE3.js
index c48db1d764..26610f73ba 100644
--- a/api/app/clients/tools/structured/DALLE3.js
+++ b/api/app/clients/tools/structured/DALLE3.js
@@ -51,10 +51,6 @@ class DALLE3 extends Tool {
this.fileStrategy = fields.fileStrategy;
/** @type {boolean} */
this.isAgent = fields.isAgent;
- if (this.isAgent) {
- /** Ensures LangChain maps [content, artifact] tuple to ToolMessage fields instead of serializing it into content. */
- this.responseFormat = 'content_and_artifact';
- }
if (fields.processFileURL) {
/** @type {processFileURL} Necessary for output to contain all image metadata. */
this.processFileURL = fields.processFileURL.bind(this);
diff --git a/api/app/clients/tools/structured/FluxAPI.js b/api/app/clients/tools/structured/FluxAPI.js
index f8341f7904..56f86a707d 100644
--- a/api/app/clients/tools/structured/FluxAPI.js
+++ b/api/app/clients/tools/structured/FluxAPI.js
@@ -113,10 +113,6 @@ class FluxAPI extends Tool {
/** @type {boolean} **/
this.isAgent = fields.isAgent;
- if (this.isAgent) {
- /** Ensures LangChain maps [content, artifact] tuple to ToolMessage fields instead of serializing it into content. */
- this.responseFormat = 'content_and_artifact';
- }
this.returnMetadata = fields.returnMetadata ?? false;
if (fields.processFileURL) {
@@ -528,40 +524,10 @@ class FluxAPI extends Tool {
return this.returnValue('No image data received from Flux API.');
}
+ // Try saving the image locally
const imageUrl = resultData.sample;
const imageName = `img-${uuidv4()}.png`;
- if (this.isAgent) {
- try {
- const fetchOptions = {};
- if (process.env.PROXY) {
- fetchOptions.agent = new HttpsProxyAgent(process.env.PROXY);
- }
- const imageResponse = await fetch(imageUrl, fetchOptions);
- const arrayBuffer = await imageResponse.arrayBuffer();
- const base64 = Buffer.from(arrayBuffer).toString('base64');
- const content = [
- {
- type: ContentTypes.IMAGE_URL,
- image_url: {
- url: `data:image/png;base64,${base64}`,
- },
- },
- ];
-
- const response = [
- {
- type: ContentTypes.TEXT,
- text: displayMessage,
- },
- ];
- return [response, { content }];
- } catch (error) {
- logger.error('[FluxAPI] Error processing finetuned image for agent:', error);
- return this.returnValue(`Failed to process the finetuned image. ${error.message}`);
- }
- }
-
try {
logger.debug('[FluxAPI] Saving finetuned image:', imageUrl);
const result = await this.processFileURL({
@@ -575,6 +541,12 @@ class FluxAPI extends Tool {
logger.debug('[FluxAPI] Finetuned image saved to path:', result.filepath);
+ // Calculate cost based on endpoint
+ const endpointKey = endpoint.includes('ultra')
+ ? 'FLUX_PRO_1_1_ULTRA_FINETUNED'
+ : 'FLUX_PRO_FINETUNED';
+ const cost = FluxAPI.PRICING[endpointKey] || 0;
+ // Return the result based on returnMetadata flag
this.result = this.returnMetadata ? result : this.wrapInMarkdown(result.filepath);
return this.returnValue(this.result);
} catch (error) {
diff --git a/api/app/clients/tools/structured/GeminiImageGen.js b/api/app/clients/tools/structured/GeminiImageGen.js
index f197f1d41b..c0e5a0ce1d 100644
--- a/api/app/clients/tools/structured/GeminiImageGen.js
+++ b/api/app/clients/tools/structured/GeminiImageGen.js
@@ -1,3 +1,4 @@
+const fs = require('fs');
const path = require('path');
const sharp = require('sharp');
const { v4 } = require('uuid');
@@ -5,7 +6,12 @@ const { ProxyAgent } = require('undici');
const { GoogleGenAI } = require('@google/genai');
const { tool } = require('@langchain/core/tools');
const { logger } = require('@librechat/data-schemas');
-const { ContentTypes, EImageOutputType } = require('librechat-data-provider');
+const {
+ FileContext,
+ ContentTypes,
+ FileSources,
+ EImageOutputType,
+} = require('librechat-data-provider');
const {
geminiToolkit,
loadServiceKey,
@@ -13,7 +19,8 @@ const {
getTransactionsConfig,
} = require('@librechat/api');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
-const { spendTokens, getFiles } = require('~/models');
+const { spendTokens } = require('~/models/spendTokens');
+const { getFiles } = require('~/models/File');
/**
* Configure proxy support for Google APIs
@@ -52,12 +59,17 @@ const displayMessage =
* @returns {string} - The processed string
*/
function replaceUnwantedChars(inputString) {
- return (
- inputString
- ?.replace(/\r\n|\r|\n/g, ' ')
- .replace(/"/g, '')
- .trim() || ''
- );
+ return inputString?.replace(/[^\w\s\-_.,!?()]/g, '') || '';
+}
+
+/**
+ * Validate and sanitize image format
+ * @param {string} format - The format to validate
+ * @returns {string} - Safe format
+ */
+function getSafeFormat(format) {
+ const allowedFormats = ['png', 'jpg', 'jpeg', 'webp', 'gif'];
+ return allowedFormats.includes(format?.toLowerCase()) ? format.toLowerCase() : 'png';
}
/**
@@ -105,8 +117,11 @@ async function initializeGeminiClient(options = {}) {
return new GoogleGenAI({ apiKey: googleKey });
}
+ // Fall back to Vertex AI with service account
logger.debug('[GeminiImageGen] Using Vertex AI with service account');
const credentialsPath = getDefaultServiceKeyPath();
+
+ // Use loadServiceKey for consistent loading (supports file paths, JSON strings, base64)
const serviceKey = await loadServiceKey(credentialsPath);
if (!serviceKey || !serviceKey.project_id) {
@@ -116,14 +131,75 @@ async function initializeGeminiClient(options = {}) {
);
}
+ // Set GOOGLE_APPLICATION_CREDENTIALS for any Google Cloud SDK dependencies
+ try {
+ await fs.promises.access(credentialsPath);
+ process.env.GOOGLE_APPLICATION_CREDENTIALS = credentialsPath;
+ } catch {
+ // File doesn't exist, skip setting env var
+ }
+
return new GoogleGenAI({
vertexai: true,
project: serviceKey.project_id,
location: process.env.GOOGLE_LOC || process.env.GOOGLE_CLOUD_LOCATION || 'global',
- googleAuthOptions: { credentials: serviceKey },
});
}
+/**
+ * Save image to local filesystem
+ * @param {string} base64Data - Base64 encoded image data
+ * @param {string} format - Image format
+ * @param {string} userId - User ID
+ * @returns {Promise<string>} - The relative URL of the saved image
+ */
+async function saveImageLocally(base64Data, format, userId) {
+ const safeFormat = getSafeFormat(format);
+ const safeUserId = userId ? path.basename(userId) : 'default';
+ const imageName = `gemini-img-${v4()}.${safeFormat}`;
+ const userDir = path.join(process.cwd(), 'client/public/images', safeUserId);
+
+ await fs.promises.mkdir(userDir, { recursive: true });
+
+ const filePath = path.join(userDir, imageName);
+ await fs.promises.writeFile(filePath, Buffer.from(base64Data, 'base64'));
+
+ logger.debug('[GeminiImageGen] Image saved locally to:', filePath);
+ return `/images/${safeUserId}/${imageName}`;
+}
+
+/**
+ * Save image to cloud storage
+ * @param {Object} params - Parameters
+ * @returns {Promise<string|null>} - The storage URL, or null if saving failed or prerequisites are missing
+ */
+async function saveToCloudStorage({ base64Data, format, processFileURL, fileStrategy, userId }) {
+ if (!processFileURL || !fileStrategy || !userId) {
+ return null;
+ }
+
+ try {
+ const safeFormat = getSafeFormat(format);
+ const safeUserId = path.basename(userId);
+ const dataURL = `data:image/${safeFormat};base64,${base64Data}`;
+ const imageName = `gemini-img-${v4()}.${safeFormat}`;
+
+ const result = await processFileURL({
+ URL: dataURL,
+ basePath: 'images',
+ userId: safeUserId,
+ fileName: imageName,
+ fileStrategy,
+ context: FileContext.image_generation,
+ });
+
+ return result.filepath;
+ } catch (error) {
+ logger.error('[GeminiImageGen] Error saving to cloud storage:', error);
+ return null;
+ }
+}
+
/**
* Convert image files to Gemini inline data format
* @param {Object} params - Parameters
@@ -250,9 +326,8 @@ function checkForSafetyBlock(response) {
* @param {string} params.userId - The user ID
* @param {string} params.conversationId - The conversation ID
* @param {string} params.model - The model name
- * @param {string} [params.messageId] - The response message ID for transaction correlation
*/
-async function recordTokenUsage({ usageMetadata, req, userId, conversationId, model, messageId }) {
+async function recordTokenUsage({ usageMetadata, req, userId, conversationId, model }) {
if (!usageMetadata) {
logger.debug('[GeminiImageGen] No usage metadata available for balance tracking');
return;
@@ -288,7 +363,6 @@ async function recordTokenUsage({ usageMetadata, req, userId, conversationId, mo
{
user: userId,
model,
- messageId,
conversationId,
context: 'image_generation',
balance,
@@ -316,18 +390,34 @@ function createGeminiImageTool(fields = {}) {
throw new Error('This tool is only available for agents.');
}
- const { req, imageFiles = [], userId, fileStrategy, GEMINI_API_KEY, GOOGLE_KEY } = fields;
+ // Skip validation during tool creation - validation happens at runtime in initializeGeminiClient
+ // This allows the tool to be added to agents when using Vertex AI without requiring API keys
+ // The actual credentials check happens when the tool is invoked
+
+ const {
+ req,
+ imageFiles = [],
+ processFileURL,
+ userId,
+ fileStrategy,
+ GEMINI_API_KEY,
+ GOOGLE_KEY,
+ // GEMINI_VERTEX_ENABLED is used for auth validation only (not used in code)
+ // When set as env var, it signals Vertex AI is configured and bypasses API key requirement
+ } = fields;
const imageOutputType = fields.imageOutputType || EImageOutputType.PNG;
const geminiImageGenTool = tool(
- async ({ prompt, image_ids, aspectRatio, imageSize }, runnableConfig) => {
+ async ({ prompt, image_ids, aspectRatio, imageSize }, _runnableConfig) => {
if (!prompt) {
throw new Error('Missing required field: prompt');
}
- logger.debug('[GeminiImageGen] Generating image', { aspectRatio, imageSize });
+ logger.debug('[GeminiImageGen] Generating image with prompt:', prompt?.substring(0, 100));
+ logger.debug('[GeminiImageGen] Options:', { aspectRatio, imageSize });
+ // Initialize Gemini client with user-provided credentials
let ai;
try {
ai = await initializeGeminiClient({
@@ -342,8 +432,10 @@ function createGeminiImageTool(fields = {}) {
];
}
+ // Build request contents
const contents = [{ text: replaceUnwantedChars(prompt) }];
+ // Add context images if provided
if (image_ids?.length > 0) {
const contextImages = await convertImagesToInlineData({
imageFiles,
@@ -355,34 +447,28 @@ function createGeminiImageTool(fields = {}) {
logger.debug('[GeminiImageGen] Added', contextImages.length, 'context images');
}
+ // Generate image
let apiResponse;
const geminiModel = process.env.GEMINI_IMAGE_MODEL || 'gemini-2.5-flash-image';
- const config = {
- responseModalities: ['TEXT', 'IMAGE'],
- };
-
- const supportsImageSize = !geminiModel.includes('gemini-2.5-flash-image');
- if (aspectRatio || (imageSize && supportsImageSize)) {
- config.imageConfig = {};
- if (aspectRatio) {
- config.imageConfig.aspectRatio = aspectRatio;
- }
- if (imageSize && supportsImageSize) {
- config.imageConfig.imageSize = imageSize;
- }
- }
-
- let derivedSignal = null;
- let abortHandler = null;
-
- if (runnableConfig?.signal) {
- derivedSignal = AbortSignal.any([runnableConfig.signal]);
- abortHandler = () => logger.debug('[GeminiImageGen] Image generation aborted');
- derivedSignal.addEventListener('abort', abortHandler, { once: true });
- config.abortSignal = derivedSignal;
- }
-
try {
+ // Build config with optional imageConfig
+ const config = {
+ responseModalities: ['TEXT', 'IMAGE'],
+ };
+
+ // Add imageConfig if aspectRatio or imageSize is specified
+ // Note: gemini-2.5-flash-image doesn't support imageSize
+ const supportsImageSize = !geminiModel.includes('gemini-2.5-flash-image');
+ if (aspectRatio || (imageSize && supportsImageSize)) {
+ config.imageConfig = {};
+ if (aspectRatio) {
+ config.imageConfig.aspectRatio = aspectRatio;
+ }
+ if (imageSize && supportsImageSize) {
+ config.imageConfig.imageSize = imageSize;
+ }
+ }
+
apiResponse = await ai.models.generateContent({
model: geminiModel,
contents,
@@ -394,12 +480,9 @@ function createGeminiImageTool(fields = {}) {
[{ type: ContentTypes.TEXT, text: `Image generation failed: ${error.message}` }],
{ content: [], file_ids: [] },
];
- } finally {
- if (abortHandler && derivedSignal) {
- derivedSignal.removeEventListener('abort', abortHandler);
- }
}
+ // Check for safety blocks
const safetyBlock = checkForSafetyBlock(apiResponse);
if (safetyBlock) {
logger.warn('[GeminiImageGen] Safety block:', safetyBlock);
@@ -426,7 +509,46 @@ function createGeminiImageTool(fields = {}) {
const imageData = convertedBuffer.toString('base64');
const mimeType = outputFormat === 'jpeg' ? 'image/jpeg' : `image/${outputFormat}`;
+ logger.debug('[GeminiImageGen] Image format:', { outputFormat, mimeType });
+
+ let imageUrl;
+ const useLocalStorage = !fileStrategy || fileStrategy === FileSources.local;
+
+ if (useLocalStorage) {
+ try {
+ imageUrl = await saveImageLocally(imageData, outputFormat, userId);
+ } catch (error) {
+ logger.error('[GeminiImageGen] Local save failed:', error);
+ imageUrl = `data:${mimeType};base64,${imageData}`;
+ }
+ } else {
+ const cloudUrl = await saveToCloudStorage({
+ base64Data: imageData,
+ format: outputFormat,
+ processFileURL,
+ fileStrategy,
+ userId,
+ });
+
+ if (cloudUrl) {
+ imageUrl = cloudUrl;
+ } else {
+ // Fallback to local
+ try {
+ imageUrl = await saveImageLocally(imageData, outputFormat, userId);
+ } catch (_error) {
+ imageUrl = `data:${mimeType};base64,${imageData}`;
+ }
+ }
+ }
+
+ logger.debug('[GeminiImageGen] Image URL:', imageUrl);
+
+ // For the artifact, we need a data URL (same as OpenAI)
+ // The local file save is for persistence, but the response needs a data URL
const dataUrl = `data:${mimeType};base64,${imageData}`;
+
+ // Return in content_and_artifact format (same as OpenAI)
const file_ids = [v4()];
const content = [
{
@@ -445,15 +567,12 @@ function createGeminiImageTool(fields = {}) {
},
];
- const conversationId = runnableConfig?.configurable?.thread_id;
- const messageId =
- runnableConfig?.configurable?.run_id ??
- runnableConfig?.configurable?.requestBody?.messageId;
+ // Record token usage for balance tracking (don't await to avoid blocking response)
+ const conversationId = _runnableConfig?.configurable?.thread_id;
recordTokenUsage({
usageMetadata: apiResponse.usageMetadata,
req,
userId,
- messageId,
conversationId,
model: geminiModel,
}).catch((error) => {
diff --git a/api/app/clients/tools/structured/StableDiffusion.js b/api/app/clients/tools/structured/StableDiffusion.js
index 8cf4b141bb..d7a7a4d96b 100644
--- a/api/app/clients/tools/structured/StableDiffusion.js
+++ b/api/app/clients/tools/structured/StableDiffusion.js
@@ -43,10 +43,6 @@ class StableDiffusionAPI extends Tool {
this.returnMetadata = fields.returnMetadata ?? false;
/** @type {boolean} */
this.isAgent = fields.isAgent;
- if (this.isAgent) {
- /** Ensures LangChain maps [content, artifact] tuple to ToolMessage fields instead of serializing it into content. */
- this.responseFormat = 'content_and_artifact';
- }
if (fields.uploadImageBuffer) {
/** @type {uploadImageBuffer} Necessary for output to contain all image metadata. */
this.uploadImageBuffer = fields.uploadImageBuffer.bind(this);
@@ -119,7 +115,7 @@ class StableDiffusionAPI extends Tool {
generationResponse = await axios.post(`${url}/sdapi/v1/txt2img`, payload);
} catch (error) {
logger.error('[StableDiffusion] Error while generating image:', error);
- return this.returnValue('Error making API request.');
+ return 'Error making API request.';
}
const image = generationResponse.data.images[0];
diff --git a/api/app/clients/tools/structured/specs/DALLE3-proxy.spec.js b/api/app/clients/tools/structured/specs/DALLE3-proxy.spec.js
index 262842b3c2..4481a7d70f 100644
--- a/api/app/clients/tools/structured/specs/DALLE3-proxy.spec.js
+++ b/api/app/clients/tools/structured/specs/DALLE3-proxy.spec.js
@@ -1,6 +1,7 @@
const DALLE3 = require('../DALLE3');
const { ProxyAgent } = require('undici');
+jest.mock('tiktoken');
const processFileURL = jest.fn();
describe('DALLE3 Proxy Configuration', () => {
diff --git a/api/app/clients/tools/structured/specs/DALLE3.spec.js b/api/app/clients/tools/structured/specs/DALLE3.spec.js
index 6071929bfc..d2040989f9 100644
--- a/api/app/clients/tools/structured/specs/DALLE3.spec.js
+++ b/api/app/clients/tools/structured/specs/DALLE3.spec.js
@@ -14,6 +14,15 @@ jest.mock('@librechat/data-schemas', () => {
};
});
+jest.mock('tiktoken', () => {
+ return {
+ encoding_for_model: jest.fn().mockReturnValue({
+ encode: jest.fn(),
+ decode: jest.fn(),
+ }),
+ };
+});
+
const processFileURL = jest.fn();
const generate = jest.fn();
diff --git a/api/app/clients/tools/structured/specs/imageTools-agent.spec.js b/api/app/clients/tools/structured/specs/imageTools-agent.spec.js
deleted file mode 100644
index b82dd87b3f..0000000000
--- a/api/app/clients/tools/structured/specs/imageTools-agent.spec.js
+++ /dev/null
@@ -1,294 +0,0 @@
-/**
- * Regression tests for image tool agent mode — verifies that invoke() returns
- * a ToolMessage with base64 in artifact.content rather than serialized into content.
- *
- * Root cause: DALLE3/FluxAPI/StableDiffusion extend LangChain's Tool but did not
- * set responseFormat = 'content_and_artifact'. LangChain's invoke() would then
- * JSON.stringify the entire [content, artifact] tuple into ToolMessage.content,
- * dumping base64 into token counting and causing context exhaustion.
- */
-
-const axios = require('axios');
-const OpenAI = require('openai');
-const undici = require('undici');
-const fetch = require('node-fetch');
-const { ToolMessage } = require('@langchain/core/messages');
-const { ContentTypes } = require('librechat-data-provider');
-const StableDiffusionAPI = require('../StableDiffusion');
-const FluxAPI = require('../FluxAPI');
-const DALLE3 = require('../DALLE3');
-
-jest.mock('axios');
-jest.mock('openai');
-jest.mock('node-fetch');
-jest.mock('undici', () => ({
- ProxyAgent: jest.fn(),
- fetch: jest.fn(),
-}));
-jest.mock('@librechat/data-schemas', () => ({
- logger: { info: jest.fn(), warn: jest.fn(), debug: jest.fn(), error: jest.fn() },
-}));
-jest.mock('path', () => ({
- resolve: jest.fn(),
- join: jest.fn().mockReturnValue('/mock/path'),
- relative: jest.fn().mockReturnValue('relative/path'),
- extname: jest.fn().mockReturnValue('.png'),
-}));
-jest.mock('fs', () => ({
- existsSync: jest.fn().mockReturnValue(true),
- mkdirSync: jest.fn(),
- promises: { writeFile: jest.fn(), readFile: jest.fn(), unlink: jest.fn() },
-}));
-
-const FAKE_BASE64 = 'aGVsbG8=';
-
-const makeToolCall = (name, args) => ({
- id: 'call_test_123',
- name,
- args,
- type: 'tool_call',
-});
-
-describe('image tools - agent mode ToolMessage format', () => {
- const ENV_KEYS = ['DALLE_API_KEY', 'FLUX_API_KEY', 'SD_WEBUI_URL', 'PROXY'];
- let savedEnv = {};
-
- beforeEach(() => {
- jest.clearAllMocks();
- for (const key of ENV_KEYS) {
- savedEnv[key] = process.env[key];
- }
- process.env.DALLE_API_KEY = 'test-dalle-key';
- process.env.FLUX_API_KEY = 'test-flux-key';
- process.env.SD_WEBUI_URL = 'http://localhost:7860';
- delete process.env.PROXY;
- });
-
- afterEach(() => {
- for (const key of ENV_KEYS) {
- if (savedEnv[key] === undefined) {
- delete process.env[key];
- } else {
- process.env[key] = savedEnv[key];
- }
- }
- savedEnv = {};
- });
-
- describe('DALLE3', () => {
- beforeEach(() => {
- OpenAI.mockImplementation(() => ({
- images: {
- generate: jest.fn().mockResolvedValue({
- data: [{ url: 'https://example.com/image.png' }],
- }),
- },
- }));
- undici.fetch.mockResolvedValue({
- arrayBuffer: () => Promise.resolve(Buffer.from(FAKE_BASE64, 'base64')),
- });
- });
-
- it('sets responseFormat to content_and_artifact when isAgent is true', () => {
- const dalle = new DALLE3({ isAgent: true });
- expect(dalle.responseFormat).toBe('content_and_artifact');
- });
-
- it('does not set responseFormat when isAgent is false', () => {
- const dalle = new DALLE3({ isAgent: false, processFileURL: jest.fn() });
- expect(dalle.responseFormat).not.toBe('content_and_artifact');
- });
-
- it('invoke() returns ToolMessage with base64 in artifact, not serialized in content', async () => {
- const dalle = new DALLE3({ isAgent: true });
- const result = await dalle.invoke(
- makeToolCall('dalle', {
- prompt: 'a box',
- quality: 'standard',
- size: '1024x1024',
- style: 'vivid',
- }),
- );
-
- expect(result).toBeInstanceOf(ToolMessage);
-
- const contentStr =
- typeof result.content === 'string' ? result.content : JSON.stringify(result.content);
- expect(contentStr).not.toContain(FAKE_BASE64);
-
- expect(result.artifact).toBeDefined();
- const artifactContent = result.artifact?.content;
- expect(Array.isArray(artifactContent)).toBe(true);
- expect(artifactContent[0].type).toBe(ContentTypes.IMAGE_URL);
- expect(artifactContent[0].image_url.url).toContain('base64');
- });
-
- it('invoke() returns ToolMessage with error string in content when API fails', async () => {
- OpenAI.mockImplementation(() => ({
- images: { generate: jest.fn().mockRejectedValue(new Error('API error')) },
- }));
-
- const dalle = new DALLE3({ isAgent: true });
- const result = await dalle.invoke(
- makeToolCall('dalle', {
- prompt: 'a box',
- quality: 'standard',
- size: '1024x1024',
- style: 'vivid',
- }),
- );
-
- expect(result).toBeInstanceOf(ToolMessage);
- const contentStr =
- typeof result.content === 'string' ? result.content : JSON.stringify(result.content);
- expect(contentStr).toContain('Something went wrong');
- expect(result.artifact).toBeDefined();
- });
- });
-
- describe('FluxAPI', () => {
- beforeEach(() => {
- jest.useFakeTimers();
- axios.post.mockResolvedValue({ data: { id: 'task-123' } });
- axios.get.mockResolvedValue({
- data: { status: 'Ready', result: { sample: 'https://example.com/image.png' } },
- });
- fetch.mockResolvedValue({
- arrayBuffer: () => Promise.resolve(Buffer.from(FAKE_BASE64, 'base64')),
- });
- });
-
- afterEach(() => {
- jest.useRealTimers();
- });
-
- it('sets responseFormat to content_and_artifact when isAgent is true', () => {
- const flux = new FluxAPI({ isAgent: true });
- expect(flux.responseFormat).toBe('content_and_artifact');
- });
-
- it('does not set responseFormat when isAgent is false', () => {
- const flux = new FluxAPI({ isAgent: false, processFileURL: jest.fn() });
- expect(flux.responseFormat).not.toBe('content_and_artifact');
- });
-
- it('invoke() returns ToolMessage with base64 in artifact, not serialized in content', async () => {
- const flux = new FluxAPI({ isAgent: true });
- const invokePromise = flux.invoke(
- makeToolCall('flux', { prompt: 'a box', endpoint: '/v1/flux-dev' }),
- );
- await jest.runAllTimersAsync();
- const result = await invokePromise;
-
- expect(result).toBeInstanceOf(ToolMessage);
- const contentStr =
- typeof result.content === 'string' ? result.content : JSON.stringify(result.content);
- expect(contentStr).not.toContain(FAKE_BASE64);
-
- expect(result.artifact).toBeDefined();
- const artifactContent = result.artifact?.content;
- expect(Array.isArray(artifactContent)).toBe(true);
- expect(artifactContent[0].type).toBe(ContentTypes.IMAGE_URL);
- expect(artifactContent[0].image_url.url).toContain('base64');
- });
-
- it('invoke() returns ToolMessage with base64 in artifact for generate_finetuned action', async () => {
- const flux = new FluxAPI({ isAgent: true });
- const invokePromise = flux.invoke(
- makeToolCall('flux', {
- action: 'generate_finetuned',
- prompt: 'a box',
- finetune_id: 'ft-abc123',
- endpoint: '/v1/flux-pro-finetuned',
- }),
- );
- await jest.runAllTimersAsync();
- const result = await invokePromise;
-
- expect(result).toBeInstanceOf(ToolMessage);
- const contentStr =
- typeof result.content === 'string' ? result.content : JSON.stringify(result.content);
- expect(contentStr).not.toContain(FAKE_BASE64);
-
- expect(result.artifact).toBeDefined();
- const artifactContent = result.artifact?.content;
- expect(Array.isArray(artifactContent)).toBe(true);
- expect(artifactContent[0].type).toBe(ContentTypes.IMAGE_URL);
- expect(artifactContent[0].image_url.url).toContain('base64');
- });
-
- it('invoke() returns ToolMessage with error string in content when task submission fails', async () => {
- axios.post.mockRejectedValue(new Error('Network error'));
-
- const flux = new FluxAPI({ isAgent: true });
- const invokePromise = flux.invoke(
- makeToolCall('flux', { prompt: 'a box', endpoint: '/v1/flux-dev' }),
- );
- await jest.runAllTimersAsync();
- const result = await invokePromise;
-
- expect(result).toBeInstanceOf(ToolMessage);
- const contentStr =
- typeof result.content === 'string' ? result.content : JSON.stringify(result.content);
- expect(contentStr).toContain('Something went wrong');
- expect(result.artifact).toBeDefined();
- });
- });
-
- describe('StableDiffusion', () => {
- beforeEach(() => {
- axios.post.mockResolvedValue({
- data: {
- images: [FAKE_BASE64],
- info: JSON.stringify({ height: 1024, width: 1024, seed: 42, infotexts: [] }),
- },
- });
- });
-
- it('sets responseFormat to content_and_artifact when isAgent is true', () => {
- const sd = new StableDiffusionAPI({ isAgent: true, override: true });
- expect(sd.responseFormat).toBe('content_and_artifact');
- });
-
- it('does not set responseFormat when isAgent is false', () => {
- const sd = new StableDiffusionAPI({
- isAgent: false,
- override: true,
- uploadImageBuffer: jest.fn(),
- });
- expect(sd.responseFormat).not.toBe('content_and_artifact');
- });
-
- it('invoke() returns ToolMessage with base64 in artifact, not serialized in content', async () => {
- const sd = new StableDiffusionAPI({ isAgent: true, override: true, userId: 'user-1' });
- const result = await sd.invoke(
- makeToolCall('stable-diffusion', { prompt: 'a box', negative_prompt: '' }),
- );
-
- expect(result).toBeInstanceOf(ToolMessage);
- const contentStr =
- typeof result.content === 'string' ? result.content : JSON.stringify(result.content);
- expect(contentStr).not.toContain(FAKE_BASE64);
-
- expect(result.artifact).toBeDefined();
- const artifactContent = result.artifact?.content;
- expect(Array.isArray(artifactContent)).toBe(true);
- expect(artifactContent[0].type).toBe(ContentTypes.IMAGE_URL);
- expect(artifactContent[0].image_url.url).toContain('base64');
- });
-
- it('invoke() returns ToolMessage with error string in content when API fails', async () => {
- axios.post.mockRejectedValue(new Error('Connection refused'));
-
- const sd = new StableDiffusionAPI({ isAgent: true, override: true, userId: 'user-1' });
- const result = await sd.invoke(
- makeToolCall('stable-diffusion', { prompt: 'a box', negative_prompt: '' }),
- );
-
- expect(result).toBeInstanceOf(ToolMessage);
- const contentStr =
- typeof result.content === 'string' ? result.content : JSON.stringify(result.content);
- expect(contentStr).toContain('Error making API request');
- });
- });
-});
diff --git a/api/app/clients/tools/util/handleTools.js b/api/app/clients/tools/util/handleTools.js
index 8adb43f945..65c88ce83f 100644
--- a/api/app/clients/tools/util/handleTools.js
+++ b/api/app/clients/tools/util/handleTools.js
@@ -7,13 +7,13 @@ const {
} = require('@librechat/agents');
const {
checkAccess,
- toolkitParent,
createSafeUser,
mcpToolPattern,
loadWebSearchAuth,
buildImageToolContext,
buildWebSearchContext,
} = require('@librechat/api');
+const { getMCPServersRegistry } = require('~/config');
const {
Tools,
Constants,
@@ -38,14 +38,13 @@ const {
createGeminiImageTool,
createOpenAIImageTools,
} = require('../');
-const { createMCPTool, createMCPTools, resolveConfigServers } = require('~/server/services/MCP');
-const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
+const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
+const { createMCPTool, createMCPTools } = require('~/server/services/MCP');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { getMCPServerTools } = require('~/server/services/Config');
-const { getMCPServersRegistry } = require('~/config');
-const { getRoleByName } = require('~/models');
+const { getRoleByName } = require('~/models/Role');
/**
* Validates the availability and authentication of tools for a user based on environment variables or user-specific plugin authentication values.
@@ -208,7 +207,7 @@ const loadTools = async ({
},
gemini_image_gen: async (toolContextMap) => {
const authFields = getAuthFields('gemini_image_gen');
- const authValues = await loadAuthValues({ userId: user, authFields, throwError: false });
+ const authValues = await loadAuthValues({ userId: user, authFields });
const imageFiles = options.tool_resources?.[EToolResources.image_edit]?.files ?? [];
const toolContext = buildImageToolContext({
imageFiles,
@@ -223,6 +222,7 @@ const loadTools = async ({
isAgent: !!agent,
req: options.req,
imageFiles,
+ processFileURL: options.processFileURL,
userId: user,
fileStrategy,
});
@@ -256,12 +256,6 @@ const loadTools = async ({
const toolContextMap = {};
const requestedMCPTools = {};
- /** Resolve config-source servers for the current user/tenant context */
- let configServers;
- if (tools.some((tool) => tool && mcpToolPattern.test(tool))) {
- configServers = await resolveConfigServers(options.req);
- }
-
for (const tool of tools) {
if (tool === Tools.execute_code) {
requestedTools[tool] = async () => {
@@ -347,7 +341,7 @@ const loadTools = async ({
continue;
}
const serverConfig = serverName
- ? await getMCPServersRegistry().getServerConfig(serverName, user, configServers)
+ ? await getMCPServersRegistry().getServerConfig(serverName, user)
: null;
if (!serverConfig) {
logger.warn(
@@ -376,16 +370,8 @@ const loadTools = async ({
continue;
}
- const toolKey = customConstructors[tool] ? tool : toolkitParent[tool];
- if (toolKey && customConstructors[toolKey]) {
- if (!requestedTools[toolKey]) {
- let cached;
- requestedTools[toolKey] = async () => {
- cached ??= customConstructors[toolKey](toolContextMap);
- return cached;
- };
- }
- requestedTools[tool] = requestedTools[toolKey];
+ if (customConstructors[tool]) {
+ requestedTools[tool] = async () => customConstructors[tool](toolContextMap);
continue;
}
@@ -425,7 +411,6 @@ const loadTools = async ({
let index = -1;
const failedMCPServers = new Set();
const safeUser = createSafeUser(options.req?.user);
-
for (const [serverName, toolConfigs] of Object.entries(requestedMCPTools)) {
index++;
/** @type {LCAvailableTools} */
@@ -440,7 +425,6 @@ const loadTools = async ({
signal,
user: safeUser,
userMCPAuthMap,
- configServers,
res: options.res,
streamId: options.req?._resumableStreamId || null,
model: agent?.model ?? model,
diff --git a/api/cache/banViolation.js b/api/cache/banViolation.js
index 36945ca420..4d321889c1 100644
--- a/api/cache/banViolation.js
+++ b/api/cache/banViolation.js
@@ -1,7 +1,8 @@
const { logger } = require('@librechat/data-schemas');
+const { isEnabled, math } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider');
-const { isEnabled, math, removePorts } = require('@librechat/api');
const { deleteAllUserSessions } = require('~/models');
+const { removePorts } = require('~/server/utils');
const getLogStores = require('./getLogStores');
const { BAN_VIOLATIONS, BAN_INTERVAL } = process.env ?? {};
diff --git a/api/cache/getLogStores.js b/api/cache/getLogStores.js
index 70eb681e53..3089192196 100644
--- a/api/cache/getLogStores.js
+++ b/api/cache/getLogStores.js
@@ -47,7 +47,7 @@ const namespaces = {
[CacheKeys.MODEL_QUERIES]: standardCache(CacheKeys.MODEL_QUERIES),
[CacheKeys.AUDIO_RUNS]: standardCache(CacheKeys.AUDIO_RUNS, Time.TEN_MINUTES),
[CacheKeys.MESSAGES]: standardCache(CacheKeys.MESSAGES, Time.ONE_MINUTE),
- [CacheKeys.FLOWS]: standardCache(CacheKeys.FLOWS, Time.ONE_MINUTE * 10),
+ [CacheKeys.FLOWS]: standardCache(CacheKeys.FLOWS, Time.ONE_MINUTE * 3),
[CacheKeys.OPENID_EXCHANGED_TOKENS]: standardCache(
CacheKeys.OPENID_EXCHANGED_TOKENS,
Time.TEN_MINUTES,
diff --git a/api/db/index.js b/api/db/index.js
index f4359c8adf..5c29902f69 100644
--- a/api/db/index.js
+++ b/api/db/index.js
@@ -1,13 +1,8 @@
const mongoose = require('mongoose');
const { createModels } = require('@librechat/data-schemas');
const { connectDb } = require('./connect');
-
-// createModels MUST run before requiring indexSync.
-// indexSync.js captures mongoose.models.Message and mongoose.models.Conversation
-// at module load time. If those models are not registered first, all MeiliSearch
-// sync operations will silently fail on every startup.
-createModels(mongoose);
-
const indexSync = require('./indexSync');
+createModels(mongoose);
+
module.exports = { connectDb, indexSync };
diff --git a/api/db/index.spec.js b/api/db/index.spec.js
deleted file mode 100644
index e1ebe176dc..0000000000
--- a/api/db/index.spec.js
+++ /dev/null
@@ -1,26 +0,0 @@
-describe('api/db/index.js', () => {
- test('createModels is called before indexSync is loaded', () => {
- jest.resetModules();
-
- const callOrder = [];
-
- jest.mock('@librechat/data-schemas', () => ({
- createModels: jest.fn((m) => {
- callOrder.push('createModels');
- m.models.Message = { name: 'Message' };
- m.models.Conversation = { name: 'Conversation' };
- }),
- }));
-
- jest.mock('./indexSync', () => {
- callOrder.push('indexSync');
- return jest.fn();
- });
-
- jest.mock('./connect', () => ({ connectDb: jest.fn() }));
-
- require('./index');
-
- expect(callOrder).toEqual(['createModels', 'indexSync']);
- });
-});
diff --git a/api/db/indexSync.js b/api/db/indexSync.js
index 13059033fb..8e8e999d92 100644
--- a/api/db/indexSync.js
+++ b/api/db/indexSync.js
@@ -6,6 +6,9 @@ const { isEnabled, FlowStateManager } = require('@librechat/api');
const { getLogStores } = require('~/cache');
const { batchResetMeiliFlags } = require('./utils');
+const Conversation = mongoose.models.Conversation;
+const Message = mongoose.models.Message;
+
const searchEnabled = isEnabled(process.env.SEARCH);
const indexingDisabled = isEnabled(process.env.MEILI_NO_SYNC);
let currentTimeout = null;
@@ -197,14 +200,6 @@ async function performSync(flowManager, flowId, flowType) {
return { messagesSync: false, convosSync: false };
}
- const Message = mongoose.models.Message;
- const Conversation = mongoose.models.Conversation;
- if (!Message || !Conversation) {
- throw new Error(
- '[indexSync] Models not registered. Ensure createModels() has been called before indexSync.',
- );
- }
-
const client = MeiliSearchClient.getInstance();
const { status } = await client.health();
@@ -241,12 +236,8 @@ async function performSync(flowManager, flowId, flowType) {
const messageCount = messageProgress.totalDocuments;
const messagesIndexed = messageProgress.totalProcessed;
const unindexedMessages = messageCount - messagesIndexed;
- const noneIndexed = messagesIndexed === 0 && unindexedMessages > 0;
- if (settingsUpdated || noneIndexed || unindexedMessages > syncThreshold) {
- if (noneIndexed && !settingsUpdated) {
- logger.info('[indexSync] No messages marked as indexed, forcing full sync');
- }
+ if (settingsUpdated || unindexedMessages > syncThreshold) {
logger.info(`[indexSync] Starting message sync (${unindexedMessages} unindexed)`);
await Message.syncWithMeili();
messagesSync = true;
@@ -270,13 +261,9 @@ async function performSync(flowManager, flowId, flowType) {
const convoCount = convoProgress.totalDocuments;
const convosIndexed = convoProgress.totalProcessed;
- const unindexedConvos = convoCount - convosIndexed;
- const noneConvosIndexed = convosIndexed === 0 && unindexedConvos > 0;
- if (settingsUpdated || noneConvosIndexed || unindexedConvos > syncThreshold) {
- if (noneConvosIndexed && !settingsUpdated) {
- logger.info('[indexSync] No conversations marked as indexed, forcing full sync');
- }
+ const unindexedConvos = convoCount - convosIndexed;
+ if (settingsUpdated || unindexedConvos > syncThreshold) {
logger.info(`[indexSync] Starting convos sync (${unindexedConvos} unindexed)`);
await Conversation.syncWithMeili();
convosSync = true;
@@ -354,13 +341,6 @@ async function indexSync() {
logger.debug('[indexSync] Creating indices...');
currentTimeout = setTimeout(async () => {
try {
- const Message = mongoose.models.Message;
- const Conversation = mongoose.models.Conversation;
- if (!Message || !Conversation) {
- throw new Error(
- '[indexSync] Models not registered. Ensure createModels() has been called before indexSync.',
- );
- }
await Message.syncWithMeili();
await Conversation.syncWithMeili();
} catch (err) {
diff --git a/api/db/indexSync.spec.js b/api/db/indexSync.spec.js
index dbe07c7595..c2e5901d6a 100644
--- a/api/db/indexSync.spec.js
+++ b/api/db/indexSync.spec.js
@@ -462,69 +462,4 @@ describe('performSync() - syncThreshold logic', () => {
);
expect(mockLogger.info).toHaveBeenCalledWith('[indexSync] Starting convos sync (50 unindexed)');
});
-
- test('forces sync when zero documents indexed (reset scenario) even if below threshold', async () => {
- Message.getSyncProgress.mockResolvedValue({
- totalProcessed: 0,
- totalDocuments: 680,
- isComplete: false,
- });
-
- Conversation.getSyncProgress.mockResolvedValue({
- totalProcessed: 0,
- totalDocuments: 76,
- isComplete: false,
- });
-
- Message.syncWithMeili.mockResolvedValue(undefined);
- Conversation.syncWithMeili.mockResolvedValue(undefined);
-
- const indexSync = require('./indexSync');
- await indexSync();
-
- expect(Message.syncWithMeili).toHaveBeenCalledTimes(1);
- expect(Conversation.syncWithMeili).toHaveBeenCalledTimes(1);
- expect(mockLogger.info).toHaveBeenCalledWith(
- '[indexSync] No messages marked as indexed, forcing full sync',
- );
- expect(mockLogger.info).toHaveBeenCalledWith(
- '[indexSync] Starting message sync (680 unindexed)',
- );
- expect(mockLogger.info).toHaveBeenCalledWith(
- '[indexSync] No conversations marked as indexed, forcing full sync',
- );
- expect(mockLogger.info).toHaveBeenCalledWith('[indexSync] Starting convos sync (76 unindexed)');
- });
-
- test('does NOT force sync when some documents already indexed and below threshold', async () => {
- Message.getSyncProgress.mockResolvedValue({
- totalProcessed: 630,
- totalDocuments: 680,
- isComplete: false,
- });
-
- Conversation.getSyncProgress.mockResolvedValue({
- totalProcessed: 70,
- totalDocuments: 76,
- isComplete: false,
- });
-
- const indexSync = require('./indexSync');
- await indexSync();
-
- expect(Message.syncWithMeili).not.toHaveBeenCalled();
- expect(Conversation.syncWithMeili).not.toHaveBeenCalled();
- expect(mockLogger.info).not.toHaveBeenCalledWith(
- '[indexSync] No messages marked as indexed, forcing full sync',
- );
- expect(mockLogger.info).not.toHaveBeenCalledWith(
- '[indexSync] No conversations marked as indexed, forcing full sync',
- );
- expect(mockLogger.info).toHaveBeenCalledWith(
- '[indexSync] 50 messages unindexed (below threshold: 1000, skipping)',
- );
- expect(mockLogger.info).toHaveBeenCalledWith(
- '[indexSync] 6 convos unindexed (below threshold: 1000, skipping)',
- );
- });
});
diff --git a/api/jest.config.js b/api/jest.config.js
index 47f8b7287b..20ee3c6aed 100644
--- a/api/jest.config.js
+++ b/api/jest.config.js
@@ -3,13 +3,12 @@ module.exports = {
clearMocks: true,
roots: [''],
coverageDirectory: 'coverage',
- maxWorkers: '50%',
testTimeout: 30000, // 30 seconds timeout for all tests
setupFiles: ['./test/jestSetup.js', './test/__mocks__/logger.js'],
moduleNameMapper: {
'~/(.*)': '/$1',
'~/data/auth.json': '/__mocks__/auth.mock.json',
- '^openid-client/passport$': '/test/__mocks__/openid-client-passport.js',
+ '^openid-client/passport$': '/test/__mocks__/openid-client-passport.js', // Mock for the passport strategy part
'^openid-client$': '/test/__mocks__/openid-client.js',
},
transformIgnorePatterns: ['/node_modules/(?!(openid-client|oauth4webapi|jose)/).*/'],
diff --git a/api/models/Action.js b/api/models/Action.js
new file mode 100644
index 0000000000..20aa20a7e4
--- /dev/null
+++ b/api/models/Action.js
@@ -0,0 +1,77 @@
+const { Action } = require('~/db/models');
+
+/**
+ * Update an action with new data without overwriting existing properties,
+ * or create a new action if it doesn't exist.
+ *
+ * @param {Object} searchParams - The search parameters to find the action to update.
+ * @param {string} searchParams.action_id - The ID of the action to update.
+ * @param {string} searchParams.user - The user ID of the action's author.
+ * @param {Object} updateData - An object containing the properties to update.
+ * @returns {Promise<Object>} The updated or newly created action document as a plain object.
+ */
+const updateAction = async (searchParams, updateData) => {
+ const options = { new: true, upsert: true };
+ return await Action.findOneAndUpdate(searchParams, updateData, options).lean();
+};
+
+/**
+ * Retrieves all actions that match the given search parameters.
+ *
+ * @param {Object} searchParams - The search parameters to find matching actions.
+ * @param {boolean} includeSensitive - Flag to include sensitive data in the metadata.
+ * @returns {Promise<Array<Object>>} A promise that resolves to an array of action documents as plain objects.
+ */
+const getActions = async (searchParams, includeSensitive = false) => {
+ const actions = await Action.find(searchParams).lean();
+
+ if (!includeSensitive) {
+ for (let i = 0; i < actions.length; i++) {
+ const metadata = actions[i].metadata;
+ if (!metadata) {
+ continue;
+ }
+
+ const sensitiveFields = ['api_key', 'oauth_client_id', 'oauth_client_secret'];
+ for (let field of sensitiveFields) {
+ if (metadata[field]) {
+ delete metadata[field];
+ }
+ }
+ }
+ }
+
+ return actions;
+};
+
+/**
+ * Deletes an action by params.
+ *
+ * @param {Object} searchParams - The search parameters to find the action to delete.
+ * @param {string} searchParams.action_id - The ID of the action to delete.
+ * @param {string} searchParams.user - The user ID of the action's author.
+ * @returns {Promise<Object|null>} A promise that resolves to the deleted action document as a plain object, or null if no document was found.
+ */
+const deleteAction = async (searchParams) => {
+ return await Action.findOneAndDelete(searchParams).lean();
+};
+
+/**
+ * Deletes actions by params.
+ *
+ * @param {Object} searchParams - The search parameters to find the actions to delete.
+ * @param {string} searchParams.action_id - The ID of the action(s) to delete.
+ * @param {string} searchParams.user - The user ID of the action's author.
+ * @returns {Promise<number>} A promise that resolves to the number of deleted action documents.
+ */
+const deleteActions = async (searchParams) => {
+ const result = await Action.deleteMany(searchParams);
+ return result.deletedCount;
+};
+
+module.exports = {
+ getActions,
+ updateAction,
+ deleteAction,
+ deleteActions,
+};
diff --git a/api/models/Agent.js b/api/models/Agent.js
new file mode 100644
index 0000000000..663285183a
--- /dev/null
+++ b/api/models/Agent.js
@@ -0,0 +1,931 @@
+const mongoose = require('mongoose');
+const crypto = require('node:crypto');
+const { logger } = require('@librechat/data-schemas');
+const { getCustomEndpointConfig } = require('@librechat/api');
+const {
+ Tools,
+ SystemRoles,
+ ResourceType,
+ actionDelimiter,
+ isAgentsEndpoint,
+ isEphemeralAgentId,
+ encodeEphemeralAgentId,
+} = require('librechat-data-provider');
+const { mcp_all, mcp_delimiter } = require('librechat-data-provider').Constants;
+const {
+ removeAgentFromAllProjects,
+ removeAgentIdsFromProject,
+ addAgentIdsToProject,
+} = require('./Project');
+const { removeAllPermissions } = require('~/server/services/PermissionService');
+const { getMCPServerTools } = require('~/server/services/Config');
+const { Agent, AclEntry, User } = require('~/db/models');
+const { getActions } = require('./Action');
+
/**
 * Extracts unique MCP server names from tools array
 * Tools format: "toolName_mcp_serverName" or "sys__server__sys_mcp_serverName"
 * @param {string[]} tools - Array of tool identifiers
 * @returns {string[]} Array of unique MCP server names
 */
const extractMCPServerNames = (tools) => {
  if (!Array.isArray(tools) || tools.length === 0) {
    return [];
  }
  const names = tools
    .filter((tool) => tool && tool.includes(mcp_delimiter))
    .map((tool) => tool.split(mcp_delimiter))
    .filter((parts) => parts.length >= 2)
    .map((parts) => parts[parts.length - 1]);
  return [...new Set(names)];
};
+
/**
 * Create an agent with the provided data.
 * The initial version snapshot excludes the author field.
 * @param {Object} agentData - The agent data to create.
 * @returns {Promise<Object>} The created agent document as a plain object.
 * @throws {Error} If the agent creation fails.
 */
const createAgent = async (agentData) => {
  const { author: _author, ...versionData } = agentData;
  const now = new Date();
  const firstVersion = { ...versionData, createdAt: now, updatedAt: now };

  const document = await Agent.create({
    ...agentData,
    versions: [firstVersion],
    category: agentData.category || 'general',
    mcpServerNames: extractMCPServerNames(agentData.tools),
  });
  return document.toObject();
};
+
/**
 * Get an agent document based on the provided search criteria.
 *
 * @param {Object} searchParameter - The search parameters to find the agent.
 * @param {string} searchParameter.id - The ID of the agent.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @returns {Promise<Object|null>} The agent document as a plain object, or null if not found.
 */
const getAgent = async (searchParameter) => {
  return await Agent.findOne(searchParameter).lean();
};
+
/**
 * Get multiple agent documents based on the provided search parameters.
 *
 * @param {Object} searchParameter - The search parameters to find agents.
 * @returns {Promise<Object[]>} Array of agent documents as plain objects.
 */
const getAgents = async (searchParameter) => {
  return await Agent.find(searchParameter).lean();
};
+
/**
 * Builds an ephemeral (non-persisted) agent from the request context,
 * combining user-enabled tools with any tools/servers mandated by the model spec.
 *
 * @param {Object} params
 * @param {ServerRequest} params.req
 * @param {string} params.spec - Optional model spec name to apply.
 * @param {string} params.endpoint - The endpoint used as the agent's provider.
 * @param {import('@librechat/agents').ClientOptions} [params.model_parameters]
 * @returns {Promise<Object>} The ephemeral agent as a plain object.
 */
const loadEphemeralAgent = async ({ req, spec, endpoint, model_parameters: _m }) => {
  const { model, ...model_parameters } = _m;
  const modelSpecs = req.config?.modelSpecs?.list;
  /** @type {TModelSpec | null} */
  let modelSpec = null;
  if (spec != null && spec !== '') {
    modelSpec = modelSpecs?.find((s) => s.name === spec) || null;
  }
  /** @type {TEphemeralAgent | null} */
  const ephemeralAgent = req.body.ephemeralAgent;
  // Union of the user-selected MCP servers and servers required by the model spec
  const mcpServers = new Set(ephemeralAgent?.mcp);
  const userId = req.user?.id; // note: userId cannot be undefined at runtime
  if (modelSpec?.mcpServers) {
    for (const mcpServer of modelSpec.mcpServers) {
      mcpServers.add(mcpServer);
    }
  }
  /** @type {string[]} */
  const tools = [];
  if (ephemeralAgent?.execute_code === true || modelSpec?.executeCode === true) {
    tools.push(Tools.execute_code);
  }
  if (ephemeralAgent?.file_search === true || modelSpec?.fileSearch === true) {
    tools.push(Tools.file_search);
  }
  if (ephemeralAgent?.web_search === true || modelSpec?.webSearch === true) {
    tools.push(Tools.web_search);
  }

  // `mcpServers` is a Set, so each server is visited exactly once
  // (the previous `addedServers` de-duplication set was redundant)
  for (const mcpServer of mcpServers) {
    const serverTools = await getMCPServerTools(userId, mcpServer);
    if (!serverTools) {
      // No tool list available for this server: register a wildcard entry instead
      tools.push(`${mcp_all}${mcp_delimiter}${mcpServer}`);
      continue;
    }
    tools.push(...Object.keys(serverTools));
  }

  const instructions = req.body.promptPrefix;

  // Get endpoint config for modelDisplayLabel fallback
  const appConfig = req.config;
  let endpointConfig = appConfig?.endpoints?.[endpoint];
  if (!isAgentsEndpoint(endpoint) && !endpointConfig) {
    try {
      endpointConfig = getCustomEndpointConfig({ endpoint, appConfig });
    } catch (err) {
      logger.error('[loadEphemeralAgent] Error getting custom endpoint config', err);
    }
  }

  // For ephemeral agents, use modelLabel if provided, then model spec's label,
  // then modelDisplayLabel from endpoint config, otherwise empty string to show model name
  const sender =
    model_parameters?.modelLabel ?? modelSpec?.label ?? endpointConfig?.modelDisplayLabel ?? '';

  // Encode ephemeral agent ID with endpoint, model, and computed sender for display
  const ephemeralId = encodeEphemeralAgentId({ endpoint, model, sender });

  const result = {
    id: ephemeralId,
    instructions,
    provider: endpoint,
    model_parameters,
    model,
    tools,
  };

  // Truthiness alone suffices here (`!= null &&` was redundant)
  if (ephemeralAgent?.artifacts) {
    result.artifacts = ephemeralAgent.artifacts;
  }
  return result;
};
+
/**
 * Resolves an agent by ID, handling both persisted and ephemeral agents.
 *
 * @param {Object} params
 * @param {ServerRequest} params.req
 * @param {string} params.spec
 * @param {string} params.agent_id
 * @param {string} params.endpoint
 * @param {import('@librechat/agents').ClientOptions} [params.model_parameters]
 * @returns {Promise<Object|null>} The agent as a plain object, or null if not found.
 */
const loadAgent = async ({ req, spec, agent_id, endpoint, model_parameters }) => {
  if (!agent_id) {
    return null;
  }
  if (isEphemeralAgentId(agent_id)) {
    return await loadEphemeralAgent({ req, spec, endpoint, model_parameters });
  }

  const agent = await getAgent({ id: agent_id });
  if (!agent) {
    return null;
  }

  // Expose the current version number (count of stored snapshots)
  agent.version = agent.versions ? agent.versions.length : 0;
  return agent;
};
+
/**
 * Check if a version already exists in the versions array, excluding timestamp and author fields
 * @param {Object} updateData - The update data to compare
 * @param {Object} currentData - The current agent data
 * @param {Array} versions - The existing versions array
 * @param {string} [actionsHash] - Hash of current action metadata
 * @returns {Object|null} - The matching version if found, null otherwise
 */
const isDuplicateVersion = (updateData, currentData, versions, actionsHash = null) => {
  if (!versions || versions.length === 0) {
    return null;
  }

  // Fields that never participate in the duplicate comparison
  const EXCLUDED_FIELDS = [
    '_id',
    'id',
    'createdAt',
    'updatedAt',
    'author',
    'updatedBy',
    'created_at',
    'updated_at',
    '__v',
    'versions',
    'actionsHash', // Exclude actionsHash from direct comparison
  ];

  const { $push: _$push, $pull: _$pull, $addToSet: _$addToSet, ...fieldUpdates } = updateData;

  if (Object.keys(fieldUpdates).length === 0 && !actionsHash) {
    return null;
  }

  // The state the agent would have after applying the direct updates
  const candidate = { ...currentData, ...fieldUpdates };
  const previous = versions[versions.length - 1];

  // Any change in the action metadata hash means this is a new version
  if (actionsHash && previous.actionsHash !== actionsHash) {
    return null;
  }

  const comparableFields = [
    ...new Set([...Object.keys(candidate), ...Object.keys(previous)]),
  ].filter((field) => !EXCLUDED_FIELDS.includes(field));

  for (const field of comparableFields) {
    const candidateValue = candidate[field];
    const previousValue = previous[field];

    // Both absent/falsy: treated as equal
    if (!candidateValue && !previousValue) {
      continue;
    }

    if (Array.isArray(candidateValue) || Array.isArray(previousValue)) {
      // Normalize: nullish becomes [], a scalar becomes a single-element array
      const candidateArr = Array.isArray(candidateValue)
        ? candidateValue
        : candidateValue == null
          ? []
          : [candidateValue];
      const previousArr = Array.isArray(previousValue)
        ? previousValue
        : previousValue == null
          ? []
          : [previousValue];

      if (candidateArr.length !== previousArr.length) {
        return null;
      }

      if (field === 'projectIds') {
        // MongoDB ObjectIds: compare as sorted strings
        const candidateIds = candidateArr.map((id) => id.toString()).sort();
        const previousIds = previousArr.map((id) => id.toString()).sort();
        if (candidateIds.some((id, i) => id !== previousIds[i])) {
          return null;
        }
      } else if (
        candidateArr.length > 0 &&
        typeof candidateArr[0] === 'object' &&
        candidateArr[0] !== null
      ) {
        // Arrays of objects: order-insensitive comparison of serialized entries
        const candidateJson = candidateArr.map((item) => JSON.stringify(item)).sort();
        const previousJson = previousArr.map((item) => JSON.stringify(item)).sort();
        if (candidateJson.some((item, i) => item !== previousJson[i])) {
          return null;
        }
      } else {
        // Arrays of primitives: order-insensitive comparison
        const sortedCandidate = [...candidateArr].sort();
        const sortedPrevious = [...previousArr].sort();
        if (sortedCandidate.some((item, i) => item !== sortedPrevious[i])) {
          return null;
        }
      }
    } else if (typeof candidateValue === 'object' && candidateValue !== null) {
      const previousObj =
        typeof previousValue === 'object' && previousValue !== null ? previousValue : {};

      // Two empty objects are equal
      if (Object.keys(candidateValue).length === 0 && Object.keys(previousObj).length === 0) {
        continue;
      }

      // Otherwise a deep comparison via serialization
      if (JSON.stringify(candidateValue) !== JSON.stringify(previousObj)) {
        return null;
      }
    } else if (candidateValue !== previousValue) {
      // Primitives: `false` or '' against undefined counts as "unchanged default"
      const isDefaultEquivalent =
        (typeof candidateValue === 'boolean' &&
          candidateValue === false &&
          previousValue === undefined) ||
        (typeof candidateValue === 'string' &&
          candidateValue === '' &&
          previousValue === undefined);
      if (!isDefaultEquivalent) {
        return null;
      }
    }
  }

  return previous;
};
+
/**
 * Update an agent with new data without overwriting existing
 * properties, or create a new agent if it doesn't exist.
 * When an agent is updated, a copy of the current state will be saved to the versions array.
 *
 * @param {Object} searchParameter - The search parameters to find the agent to update.
 * @param {string} searchParameter.id - The ID of the agent to update.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {Object} updateData - An object containing the properties to update.
 *   NOTE: mutated in place — `mcpServerNames` and `$push.versions` may be added before the query runs.
 * @param {Object} [options] - Optional configuration object.
 * @param {string} [options.updatingUserId] - The ID of the user performing the update (used for tracking non-author updates).
 * @param {boolean} [options.forceVersion] - Force creation of a new version even if no fields changed.
 * @param {boolean} [options.skipVersioning] - Skip version creation entirely (useful for isolated operations like sharing).
 * @returns {Promise} The updated or newly created agent document as a plain object.
 * @throws {Error} If the update would create a duplicate version
 */
const updateAgent = async (searchParameter, updateData, options = {}) => {
  const { updatingUserId = null, forceVersion = false, skipVersioning = false } = options;
  // upsert is false: a missing agent yields null rather than a new document
  const mongoOptions = { new: true, upsert: false };

  const currentAgent = await Agent.findOne(searchParameter);
  if (currentAgent) {
    // Snapshot of the current state minus identity/bookkeeping fields;
    // this becomes the basis of the new version entry
    const {
      __v,
      _id,
      id: __id,
      versions,
      author: _author,
      ...versionData
    } = currentAgent.toObject();
    // Separate field-level updates from Mongo array operators
    const { $push, $pull, $addToSet, ...directUpdates } = updateData;

    // Sync mcpServerNames when tools are updated
    if (directUpdates.tools !== undefined) {
      const mcpServerNames = extractMCPServerNames(directUpdates.tools);
      directUpdates.mcpServerNames = mcpServerNames;
      updateData.mcpServerNames = mcpServerNames; // Also update the original updateData
    }

    let actionsHash = null;

    // Generate actions hash if agent has actions
    if (currentAgent.actions && currentAgent.actions.length > 0) {
      // Extract action IDs from the format "domain_action_id"
      const actionIds = currentAgent.actions
        .map((action) => {
          const parts = action.split(actionDelimiter);
          return parts[1]; // Get just the action ID part
        })
        .filter(Boolean);

      if (actionIds.length > 0) {
        try {
          const actions = await getActions(
            {
              action_id: { $in: actionIds },
            },
            true,
          ); // Include sensitive data for hash

          actionsHash = await generateActionMetadataHash(currentAgent.actions, actions);
        } catch (error) {
          // Hash generation is best-effort; versioning proceeds without it
          logger.error('Error fetching actions for hash generation:', error);
        }
      }
    }

    // A version is recorded whenever any direct field or array operator changes
    // the agent, unless the caller explicitly opted out via skipVersioning
    const shouldCreateVersion =
      !skipVersioning &&
      (forceVersion || Object.keys(directUpdates).length > 0 || $push || $pull || $addToSet);

    if (shouldCreateVersion) {
      const duplicateVersion = isDuplicateVersion(updateData, versionData, versions, actionsHash);
      if (duplicateVersion && !forceVersion) {
        // No changes detected, return the current agent without creating a new version
        const agentObj = currentAgent.toObject();
        agentObj.version = versions.length;
        return agentObj;
      }
    }

    // The version entry reflects the post-update state of the agent
    const versionEntry = {
      ...versionData,
      ...directUpdates,
      updatedAt: new Date(),
    };

    // Include actions hash in version if available
    if (actionsHash) {
      versionEntry.actionsHash = actionsHash;
    }

    // Always store updatedBy field to track who made the change
    if (updatingUserId) {
      versionEntry.updatedBy = new mongoose.Types.ObjectId(updatingUserId);
    }

    if (shouldCreateVersion) {
      // Merge the version append into any caller-supplied $push
      updateData.$push = {
        ...($push || {}),
        versions: versionEntry,
      };
    }
  }

  return Agent.findOneAndUpdate(searchParameter, updateData, mongoOptions).lean();
};
+
/**
 * Adds a file to an agent's tool resource, initializing the file_ids array when absent.
 * @param {object} params
 * @param {ServerRequest} params.req
 * @param {string} params.agent_id
 * @param {string} params.tool_resource
 * @param {string} params.file_id
 * @returns {Promise<Agent>} The updated agent.
 * @throws {Error} If the agent cannot be found.
 */
const addAgentResourceFile = async ({ req, agent_id, tool_resource, file_id }) => {
  const searchParameter = { id: agent_id };
  const agent = await getAgent(searchParameter);
  if (!agent) {
    throw new Error('Agent not found for adding resource file');
  }

  const fileIdsPath = `tool_resources.${tool_resource}.file_ids`;

  // Ensure the file_ids array exists before attempting the $addToSet below
  await Agent.updateOne(
    { id: agent_id, [fileIdsPath]: { $exists: false } },
    { $set: { [fileIdsPath]: [] } },
  );

  const updatedAgent = await updateAgent(
    searchParameter,
    {
      $addToSet: {
        tools: tool_resource,
        [fileIdsPath]: file_id,
      },
    },
    { updatingUserId: req?.user?.id },
  );

  if (!updatedAgent) {
    throw new Error('Agent not found for adding resource file');
  }
  return updatedAgent;
};
+
/**
 * Removes multiple resource files from an agent using a single atomic $pull.
 * @param {object} params
 * @param {string} params.agent_id
 * @param {Array<{tool_resource: string, file_id: string}>} params.files
 * @returns {Promise<Agent>} The updated agent.
 * @throws {Error} If the agent is not found or the update fails.
 */
const removeAgentResourceFiles = async ({ agent_id, files }) => {
  const searchParameter = { id: agent_id };

  // Group file ids by tool resource so each resource gets exactly one $pull entry
  const filesByResource = {};
  for (const { tool_resource, file_id } of files) {
    (filesByResource[tool_resource] ??= []).push(file_id);
  }

  // Atomically remove the grouped file IDs
  // (the previous `resourcesToCheck` set was built but never used — removed)
  const pullOps = {};
  for (const [resource, fileIds] of Object.entries(filesByResource)) {
    pullOps[`tool_resources.${resource}.file_ids`] = { $in: fileIds };
  }

  const agentAfterPull = await Agent.findOneAndUpdate(
    searchParameter,
    { $pull: pullOps },
    { new: true },
  ).lean();

  if (!agentAfterPull) {
    // Agent might have been deleted concurrently, or never existed.
    const agentExists = await getAgent(searchParameter);
    if (!agentExists) {
      throw new Error('Agent not found for removing resource files');
    }
    // If it existed but findOneAndUpdate returned null, something else went wrong.
    throw new Error('Failed to update agent during file removal (pull step)');
  }

  // Empty arrays may remain in tool_resources; the removal itself is complete.
  return agentAfterPull;
};
+
/**
 * Deletes an agent and cleans up references to it: project membership,
 * ACL permissions, handoff edges, and user favorites.
 *
 * @param {Object} searchParameter - The search parameters to find the agent to delete.
 * @param {string} searchParameter.id - The ID of the agent to delete.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @returns {Promise<Object|null>} The deleted agent document, or null if none matched.
 */
const deleteAgent = async (searchParameter) => {
  const agent = await Agent.findOneAndDelete(searchParameter);
  if (!agent) {
    return agent;
  }

  await removeAgentFromAllProjects(agent.id);
  await Promise.all([
    removeAllPermissions({ resourceType: ResourceType.AGENT, resourceId: agent._id }),
    removeAllPermissions({ resourceType: ResourceType.REMOTE_AGENT, resourceId: agent._id }),
  ]);

  // Best-effort cleanup: failures below are logged and do not undo the deletion
  try {
    await Agent.updateMany({ 'edges.to': agent.id }, { $pull: { edges: { to: agent.id } } });
  } catch (error) {
    logger.error('[deleteAgent] Error removing agent from handoff edges', error);
  }
  try {
    await User.updateMany(
      { 'favorites.agentId': agent.id },
      { $pull: { favorites: { agentId: agent.id } } },
    );
  } catch (error) {
    logger.error('[deleteAgent] Error removing agent from user favorites', error);
  }

  return agent;
};
+
/**
 * Deletes all agents created by a specific user, including project references,
 * ACL entries, and favorites. Errors are logged rather than thrown.
 * @param {string} userId - The ID of the user whose agents should be deleted.
 * @returns {Promise<void>} Resolves when all user agents have been deleted.
 */
const deleteUserAgents = async (userId) => {
  try {
    const userAgents = await getAgents({ author: userId });
    if (!userAgents.length) {
      return;
    }

    const agentIds = [];
    const agentObjectIds = [];
    for (const agent of userAgents) {
      agentIds.push(agent.id);
      agentObjectIds.push(agent._id);
    }

    for (const agentId of agentIds) {
      await removeAgentFromAllProjects(agentId);
    }

    await AclEntry.deleteMany({
      resourceType: { $in: [ResourceType.AGENT, ResourceType.REMOTE_AGENT] },
      resourceId: { $in: agentObjectIds },
    });

    try {
      await User.updateMany(
        { 'favorites.agentId': { $in: agentIds } },
        { $pull: { favorites: { agentId: { $in: agentIds } } } },
      );
    } catch (error) {
      logger.error('[deleteUserAgents] Error removing agents from user favorites', error);
    }

    await Agent.deleteMany({ author: userId });
  } catch (error) {
    logger.error('[deleteUserAgents] General error:', error);
  }
};
+
/**
 * Get agents by accessible IDs with optional cursor-based pagination.
 * @param {Object} params - The parameters for getting accessible agents.
 * @param {Array} [params.accessibleIds] - Array of agent ObjectIds the user has ACL access to.
 * @param {Object} [params.otherParams] - Additional query parameters (including author filter).
 * @param {number} [params.limit] - Number of agents to return (max 100). If not provided, returns all agents.
 * @param {string} [params.after] - Cursor for pagination - get agents after this cursor. // base64 encoded JSON string with updatedAt and _id.
 * @returns {Promise} A promise that resolves to an object containing the agents data and pagination info.
 */
const getListAgentsByAccess = async ({
  accessibleIds = [],
  otherParams = {},
  limit = null,
  after = null,
}) => {
  const isPaginated = limit !== null && limit !== undefined;
  // Clamp the page size to [1, 100]; non-numeric input falls back to 20
  const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;

  // Build base query combining ACL accessible agents with other filters
  const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };

  // Add cursor condition
  if (after) {
    try {
      const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
      const { updatedAt, _id } = cursor;

      // Keyset pagination: rows strictly older than the cursor, or with the
      // same timestamp and a greater _id (matches the updatedAt desc, _id asc sort)
      const cursorCondition = {
        $or: [
          { updatedAt: { $lt: new Date(updatedAt) } },
          { updatedAt: new Date(updatedAt), _id: { $gt: new mongoose.Types.ObjectId(_id) } },
        ],
      };

      // Merge cursor condition with base query
      if (Object.keys(baseQuery).length > 0) {
        baseQuery.$and = [{ ...baseQuery }, cursorCondition];
        // Remove the original conditions from baseQuery to avoid duplication
        Object.keys(baseQuery).forEach((key) => {
          if (key !== '$and') delete baseQuery[key];
        });
      } else {
        Object.assign(baseQuery, cursorCondition);
      }
    } catch (error) {
      // Malformed cursors are ignored; the query falls back to the first page
      logger.warn('Invalid cursor:', error.message);
    }
  }

  // Projection limited to list-view fields
  let query = Agent.find(baseQuery, {
    id: 1,
    _id: 1,
    name: 1,
    avatar: 1,
    author: 1,
    projectIds: 1,
    description: 1,
    updatedAt: 1,
    category: 1,
    support_contact: 1,
    is_promoted: 1,
  }).sort({ updatedAt: -1, _id: 1 });

  // Only apply limit if pagination is requested
  if (isPaginated) {
    // Fetch one extra row so hasMore can be detected without a second query
    query = query.limit(normalizedLimit + 1);
  }

  const agents = await query.lean();

  const hasMore = isPaginated ? agents.length > normalizedLimit : false;
  const data = (isPaginated ? agents.slice(0, normalizedLimit) : agents).map((agent) => {
    if (agent.author) {
      // Expose the author ObjectId as a string
      agent.author = agent.author.toString();
    }
    return agent;
  });

  // Generate next cursor only if paginated
  let nextCursor = null;
  if (isPaginated && hasMore && data.length > 0) {
    // Last row of the returned page (agents[normalizedLimit - 1] === last element of data)
    const lastAgent = agents[normalizedLimit - 1];
    nextCursor = Buffer.from(
      JSON.stringify({
        updatedAt: lastAgent.updatedAt.toISOString(),
        _id: lastAgent._id.toString(),
      }),
    ).toString('base64');
  }

  return {
    object: 'list',
    data,
    first_id: data.length > 0 ? data[0].id : null,
    last_id: data.length > 0 ? data[data.length - 1].id : null,
    has_more: hasMore,
    after: nextCursor,
  };
};
+
/**
 * Updates the projects associated with an agent, adding and removing project IDs as specified.
 * This function also updates the corresponding projects to include or exclude the agent ID,
 * and rolls those project changes back when the agent update itself fails.
 *
 * @param {Object} params - Parameters for updating the agent's projects.
 * @param {IUser} params.user - The user performing the update.
 * @param {string} params.agentId - The ID of the agent to update.
 * @param {string[]} [params.projectIds] - Array of project IDs to add to the agent.
 * @param {string[]} [params.removeProjectIds] - Array of project IDs to remove from the agent.
 * @returns {Promise<Object>} The updated agent document.
 * @throws {Error} If there's an error updating the agent or projects.
 */
const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds }) => {
  const updateOps = {};
  const hasRemovals = removeProjectIds != null && removeProjectIds.length > 0;
  const hasAdditions = projectIds != null && projectIds.length > 0;

  if (hasRemovals) {
    for (const projectId of removeProjectIds) {
      await removeAgentIdsFromProject(projectId, [agentId]);
    }
    updateOps.$pull = { projectIds: { $in: removeProjectIds } };
  }

  if (hasAdditions) {
    for (const projectId of projectIds) {
      await addAgentIdsToProject(projectId, [agentId]);
    }
    updateOps.$addToSet = { projectIds: { $each: projectIds } };
  }

  // Nothing to change: return the agent as-is
  if (!hasRemovals && !hasAdditions) {
    return await getAgent({ id: agentId });
  }

  // Admins may update any agent; other users only their own
  const updateQuery = { id: agentId, author: user.id };
  if (user.role === SystemRoles.ADMIN) {
    delete updateQuery.author;
  }

  const updatedAgent = await updateAgent(updateQuery, updateOps, {
    updatingUserId: user.id,
    skipVersioning: true,
  });
  if (updatedAgent) {
    return updatedAgent;
  }

  // The agent update failed: roll back the project membership changes
  if (updateOps.$addToSet) {
    for (const projectId of projectIds) {
      await removeAgentIdsFromProject(projectId, [agentId]);
    }
  } else if (updateOps.$pull) {
    for (const projectId of removeProjectIds) {
      await addAgentIdsToProject(projectId, [agentId]);
    }
  }

  return await getAgent({ id: agentId });
};
+
/**
 * Reverts an agent to a specific version in its version history.
 * @param {Object} searchParameter - The search parameters to find the agent to revert.
 * @param {string} searchParameter.id - The ID of the agent to revert.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {number} versionIndex - The index of the version to revert to in the versions array.
 * @returns {Promise<Object>} The updated agent document after reverting.
 * @throws {Error} If the agent is not found or the specified version does not exist.
 */
const revertAgentVersion = async (searchParameter, versionIndex) => {
  const agent = await Agent.findOne(searchParameter);
  if (!agent) {
    throw new Error('Agent not found');
  }

  const targetVersion = agent.versions ? agent.versions[versionIndex] : undefined;
  if (!targetVersion) {
    throw new Error(`Version ${versionIndex} not found`);
  }

  // Strip identity/history fields the snapshot must not overwrite
  const {
    _id: _omitId,
    id: _omitAgentId,
    versions: _omitVersions,
    author: _omitAuthor,
    updatedBy: _omitUpdatedBy,
    ...updateData
  } = { ...targetVersion };

  return Agent.findOneAndUpdate(searchParameter, updateData, { new: true }).lean();
};
+
/**
 * Generates a deterministic SHA-256 hash of action metadata for version comparison.
 * The hash is independent of input order (IDs and metadata keys are sorted first).
 *
 * @param {string[]} actionIds - Array of action IDs in format "domain_action_id"
 * @param {Action[]} actions - Array of action documents
 * @returns {Promise<string>} Hex-encoded SHA-256 hash, or '' when there are no actions
 */
const generateActionMetadataHash = async (actionIds, actions) => {
  if (!actionIds || actionIds.length === 0) {
    return '';
  }

  // Map action_id -> metadata for quick lookup
  const actionMap = new Map(actions.map((action) => [action.action_id, action.metadata]));

  // Sort action IDs so the hash does not depend on input order
  const metadataString = [...actionIds]
    .sort()
    .map((actionFullId) => {
      // Extract just the action_id part (after the delimiter)
      const parts = actionFullId.split(actionDelimiter);
      const actionId = parts[1];

      const metadata = actionMap.get(actionId);
      if (!metadata) {
        return `${actionId}:null`;
      }

      // Serialize metadata with sorted keys for deterministic output
      const metadataStr = Object.keys(metadata)
        .sort()
        .map((key) => `${key}:${JSON.stringify(metadata[key])}`)
        .join(',');
      return `${actionId}:{${metadataStr}}`;
    })
    .join(';');

  // crypto.createHash is simpler than the Web Crypto subtle API (which required
  // a manual byte-to-hex conversion) and yields the identical SHA-256 hex digest.
  return crypto.createHash('sha256').update(metadataString, 'utf8').digest('hex');
};
/**
 * Counts the number of promoted agents.
 * @returns {Promise<number>} The count of agents flagged as promoted.
 */
const countPromotedAgents = async () => {
  return await Agent.countDocuments({ is_promoted: true });
};
+
/**
 * NOTE(review): dangling doc comment — no `loadDefaultAgent` implementation
 * follows it in this module. Either remove this comment or restore the
 * helper it describes.
 *
 * Load a default agent based on the endpoint
 * @param {string} endpoint
 * @returns {Agent | null}
 */
// Public API for the Agent model helpers.
module.exports = {
  getAgent,
  getAgents,
  loadAgent,
  createAgent,
  updateAgent,
  deleteAgent,
  deleteUserAgents,
  revertAgentVersion,
  updateAgentProjects,
  countPromotedAgents,
  addAgentResourceFile,
  getListAgentsByAccess,
  removeAgentResourceFiles,
  generateActionMetadataHash,
};
diff --git a/packages/data-schemas/src/methods/agent.spec.ts b/api/models/Agent.spec.js
similarity index 66%
rename from packages/data-schemas/src/methods/agent.spec.ts
rename to api/models/Agent.spec.js
index 3184f51fa1..baceb3e8f3 100644
--- a/packages/data-schemas/src/methods/agent.spec.ts
+++ b/api/models/Agent.spec.js
@@ -1,128 +1,62 @@
-import mongoose from 'mongoose';
-import { v4 as uuidv4 } from 'uuid';
-import { MongoMemoryServer } from 'mongodb-memory-server';
-import {
- AccessRoleIds,
- ResourceType,
- PrincipalType,
- PrincipalModel,
- PermissionBits,
- EToolResources,
-} from 'librechat-data-provider';
-import type {
- UpdateWithAggregationPipeline,
- RootFilterQuery,
- QueryOptions,
- UpdateQuery,
-} from 'mongoose';
-import type { IAgent, IAclEntry, IUser, IAccessRole } from '..';
-import { createAgentMethods, type AgentMethods } from './agent';
-import { createAclEntryMethods } from './aclEntry';
-import { createModels } from '~/models';
-
-/** Version snapshot stored in `IAgent.versions[]`. Extends the base omit with runtime-only fields. */
-type VersionEntry = Omit & {
- __v?: number;
- versions?: unknown;
- version?: number;
- updatedBy?: mongoose.Types.ObjectId;
+const originalEnv = {
+ CREDS_KEY: process.env.CREDS_KEY,
+ CREDS_IV: process.env.CREDS_IV,
};
-jest.mock('~/config/winston', () => ({
- error: jest.fn(),
- warn: jest.fn(),
- info: jest.fn(),
- debug: jest.fn(),
+process.env.CREDS_KEY = '0123456789abcdef0123456789abcdef';
+process.env.CREDS_IV = '0123456789abcdef';
+
+jest.mock('~/server/services/Config', () => ({
+ getCachedTools: jest.fn(),
+ getMCPServerTools: jest.fn(),
}));
-let mongoServer: InstanceType;
-let Agent: mongoose.Model;
-let AclEntry: mongoose.Model;
-let User: mongoose.Model;
-let AccessRole: mongoose.Model;
-let modelsToCleanup: string[] = [];
-let methods: ReturnType;
+const mongoose = require('mongoose');
+const { v4: uuidv4 } = require('uuid');
+const { agentSchema } = require('@librechat/data-schemas');
+const { MongoMemoryServer } = require('mongodb-memory-server');
+const { AccessRoleIds, ResourceType, PrincipalType } = require('librechat-data-provider');
+const {
+ getAgent,
+ loadAgent,
+ createAgent,
+ updateAgent,
+ deleteAgent,
+ deleteUserAgents,
+ revertAgentVersion,
+ updateAgentProjects,
+ addAgentResourceFile,
+ getListAgentsByAccess,
+ removeAgentResourceFiles,
+ generateActionMetadataHash,
+} = require('./Agent');
+const permissionService = require('~/server/services/PermissionService');
+const { getCachedTools, getMCPServerTools } = require('~/server/services/Config');
+const { AclEntry, User } = require('~/db/models');
-let createAgent: AgentMethods['createAgent'];
-let getAgent: AgentMethods['getAgent'];
-let updateAgent: AgentMethods['updateAgent'];
-let deleteAgent: AgentMethods['deleteAgent'];
-let deleteUserAgents: AgentMethods['deleteUserAgents'];
-let revertAgentVersion: AgentMethods['revertAgentVersion'];
-let addAgentResourceFile: AgentMethods['addAgentResourceFile'];
-let removeAgentResourceFiles: AgentMethods['removeAgentResourceFiles'];
-let getListAgentsByAccess: AgentMethods['getListAgentsByAccess'];
-let generateActionMetadataHash: AgentMethods['generateActionMetadataHash'];
+/**
+ * @type {import('mongoose').Model}
+ */
+let Agent;
-const getActions = jest.fn().mockResolvedValue([]);
-
-beforeAll(async () => {
- mongoServer = await MongoMemoryServer.create();
- const mongoUri = mongoServer.getUri();
-
- const models = createModels(mongoose);
- modelsToCleanup = Object.keys(models);
- Agent = mongoose.models.Agent as mongoose.Model;
- AclEntry = mongoose.models.AclEntry as mongoose.Model;
- User = mongoose.models.User as mongoose.Model;
- AccessRole = mongoose.models.AccessRole as mongoose.Model;
-
- const removeAllPermissions = async ({
- resourceType,
- resourceId,
- }: {
- resourceType: string;
- resourceId: unknown;
- }) => {
- await AclEntry.deleteMany({ resourceType, resourceId });
- };
-
- const aclEntryMethods = createAclEntryMethods(mongoose);
- const { getSoleOwnedResourceIds } = aclEntryMethods;
-
- methods = createAgentMethods(mongoose, {
- removeAllPermissions,
- getActions,
- getSoleOwnedResourceIds,
- });
- createAgent = methods.createAgent;
- getAgent = methods.getAgent;
- updateAgent = methods.updateAgent;
- deleteAgent = methods.deleteAgent;
- deleteUserAgents = methods.deleteUserAgents;
- revertAgentVersion = methods.revertAgentVersion;
- addAgentResourceFile = methods.addAgentResourceFile;
- removeAgentResourceFiles = methods.removeAgentResourceFiles;
- getListAgentsByAccess = methods.getListAgentsByAccess;
- generateActionMetadataHash = methods.generateActionMetadataHash;
-
- await mongoose.connect(mongoUri);
-
- await AccessRole.create({
- accessRoleId: AccessRoleIds.AGENT_OWNER,
- name: 'Owner',
- description: 'Full control over agents',
- resourceType: ResourceType.AGENT,
- permBits: 15,
- });
-}, 30000);
-
-afterAll(async () => {
- const collections = mongoose.connection.collections;
- for (const key in collections) {
- await collections[key].deleteMany({});
- }
- for (const modelName of modelsToCleanup) {
- if (mongoose.models[modelName]) {
- delete (mongoose.models as Record)[modelName];
- }
- }
- await mongoose.disconnect();
- await mongoServer.stop();
-});
-
-describe('Agent Methods', () => {
+describe('models/Agent', () => {
describe('Agent Resource File Operations', () => {
+ let mongoServer;
+
+ beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
+ await mongoose.connect(mongoUri);
+ }, 20000);
+
+ afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ process.env.CREDS_KEY = originalEnv.CREDS_KEY;
+ process.env.CREDS_IV = originalEnv.CREDS_IV;
+ });
+
beforeEach(async () => {
await Agent.deleteMany({});
await User.deleteMany({});
@@ -139,10 +73,10 @@ describe('Agent Methods', () => {
file_id: fileId,
});
- expect(updatedAgent!.tools).toContain(toolResource);
- expect(Array.isArray(updatedAgent!.tools)).toBe(true);
+ expect(updatedAgent.tools).toContain(toolResource);
+ expect(Array.isArray(updatedAgent.tools)).toBe(true);
// Should not duplicate
- const count = updatedAgent!.tools?.filter((t) => t === toolResource).length ?? 0;
+ const count = updatedAgent.tools.filter((t) => t === toolResource).length;
expect(count).toBe(1);
});
@@ -166,9 +100,9 @@ describe('Agent Methods', () => {
file_id: fileId2,
});
- expect(updatedAgent!.tools).toContain(toolResource);
- expect(Array.isArray(updatedAgent!.tools)).toBe(true);
- const count = updatedAgent!.tools?.filter((t) => t === toolResource).length ?? 0;
+ expect(updatedAgent.tools).toContain(toolResource);
+ expect(Array.isArray(updatedAgent.tools)).toBe(true);
+ const count = updatedAgent.tools.filter((t) => t === toolResource).length;
expect(count).toBe(1);
});
@@ -182,13 +116,9 @@ describe('Agent Methods', () => {
await Promise.all(additionPromises);
const updatedAgent = await Agent.findOne({ id: agent.id });
- expect(updatedAgent?.tool_resources?.[EToolResources.execute_code]?.file_ids).toBeDefined();
- expect(updatedAgent?.tool_resources?.[EToolResources.execute_code]?.file_ids).toHaveLength(
- 10,
- );
- expect(
- new Set(updatedAgent?.tool_resources?.[EToolResources.execute_code]?.file_ids).size,
- ).toBe(10);
+ expect(updatedAgent.tool_resources.test_tool.file_ids).toBeDefined();
+ expect(updatedAgent.tool_resources.test_tool.file_ids).toHaveLength(10);
+ expect(new Set(updatedAgent.tool_resources.test_tool.file_ids).size).toBe(10);
});
test('should handle concurrent additions and removals', async () => {
@@ -198,18 +128,18 @@ describe('Agent Methods', () => {
await Promise.all(createFileOperations(agent.id, initialFileIds, 'add'));
const newFileIds = Array.from({ length: 5 }, () => uuidv4());
- const operations: Promise[] = [
+ const operations = [
...newFileIds.map((fileId) =>
addAgentResourceFile({
agent_id: agent.id,
- tool_resource: EToolResources.execute_code,
+ tool_resource: 'test_tool',
file_id: fileId,
}),
),
...initialFileIds.map((fileId) =>
removeAgentResourceFiles({
agent_id: agent.id,
- files: [{ tool_resource: EToolResources.execute_code, file_id: fileId }],
+ files: [{ tool_resource: 'test_tool', file_id: fileId }],
}),
),
];
@@ -217,8 +147,8 @@ describe('Agent Methods', () => {
await Promise.all(operations);
const updatedAgent = await Agent.findOne({ id: agent.id });
- expect(updatedAgent?.tool_resources?.[EToolResources.execute_code]?.file_ids).toBeDefined();
- expect(updatedAgent?.tool_resources?.[EToolResources.execute_code]?.file_ids).toHaveLength(5);
+ expect(updatedAgent.tool_resources.test_tool.file_ids).toBeDefined();
+ expect(updatedAgent.tool_resources.test_tool.file_ids).toHaveLength(5);
});
test('should initialize array when adding to non-existent tool resource', async () => {
@@ -227,13 +157,13 @@ describe('Agent Methods', () => {
const updatedAgent = await addAgentResourceFile({
agent_id: agent.id,
- tool_resource: EToolResources.context,
+ tool_resource: 'new_tool',
file_id: fileId,
});
- expect(updatedAgent?.tool_resources?.[EToolResources.context]?.file_ids).toBeDefined();
- expect(updatedAgent?.tool_resources?.[EToolResources.context]?.file_ids).toHaveLength(1);
- expect(updatedAgent?.tool_resources?.[EToolResources.context]?.file_ids?.[0]).toBe(fileId);
+ expect(updatedAgent.tool_resources.new_tool.file_ids).toBeDefined();
+ expect(updatedAgent.tool_resources.new_tool.file_ids).toHaveLength(1);
+ expect(updatedAgent.tool_resources.new_tool.file_ids[0]).toBe(fileId);
});
test('should handle rapid sequential modifications to same tool resource', async () => {
@@ -243,33 +173,27 @@ describe('Agent Methods', () => {
for (let i = 0; i < 10; i++) {
await addAgentResourceFile({
agent_id: agent.id,
- tool_resource: EToolResources.execute_code,
+ tool_resource: 'test_tool',
file_id: `${fileId}_${i}`,
});
if (i % 2 === 0) {
await removeAgentResourceFiles({
agent_id: agent.id,
- files: [{ tool_resource: EToolResources.execute_code, file_id: `${fileId}_${i}` }],
+ files: [{ tool_resource: 'test_tool', file_id: `${fileId}_${i}` }],
});
}
}
const updatedAgent = await Agent.findOne({ id: agent.id });
- expect(updatedAgent?.tool_resources?.[EToolResources.execute_code]?.file_ids).toBeDefined();
- expect(
- Array.isArray(updatedAgent!.tool_resources![EToolResources.execute_code]!.file_ids),
- ).toBe(true);
+ expect(updatedAgent.tool_resources.test_tool.file_ids).toBeDefined();
+ expect(Array.isArray(updatedAgent.tool_resources.test_tool.file_ids)).toBe(true);
});
test('should handle multiple tool resources concurrently', async () => {
const agent = await createBasicAgent();
- const toolResources = [
- EToolResources.file_search,
- EToolResources.execute_code,
- EToolResources.image_edit,
- ] as const;
- const operations: Promise[] = [];
+ const toolResources = ['tool1', 'tool2', 'tool3'];
+ const operations = [];
toolResources.forEach((tool) => {
const fileIds = Array.from({ length: 5 }, () => uuidv4());
@@ -288,8 +212,8 @@ describe('Agent Methods', () => {
const updatedAgent = await Agent.findOne({ id: agent.id });
toolResources.forEach((tool) => {
- expect(updatedAgent!.tool_resources![tool]!.file_ids).toBeDefined();
- expect(updatedAgent!.tool_resources![tool]!.file_ids).toHaveLength(5);
+ expect(updatedAgent.tool_resources[tool].file_ids).toBeDefined();
+ expect(updatedAgent.tool_resources[tool].file_ids).toHaveLength(5);
});
});
@@ -318,7 +242,7 @@ describe('Agent Methods', () => {
if (setupFile) {
await addAgentResourceFile({
agent_id: agent.id,
- tool_resource: EToolResources.execute_code,
+ tool_resource: 'test_tool',
file_id: fileId,
});
}
@@ -327,19 +251,19 @@ describe('Agent Methods', () => {
operation === 'add'
? addAgentResourceFile({
agent_id: agent.id,
- tool_resource: EToolResources.execute_code,
+ tool_resource: 'test_tool',
file_id: fileId,
})
: removeAgentResourceFiles({
agent_id: agent.id,
- files: [{ tool_resource: EToolResources.execute_code, file_id: fileId }],
+ files: [{ tool_resource: 'test_tool', file_id: fileId }],
}),
);
await Promise.all(promises);
const updatedAgent = await Agent.findOne({ id: agent.id });
- const fileIds = updatedAgent?.tool_resources?.[EToolResources.execute_code]?.file_ids ?? [];
+ const fileIds = updatedAgent.tool_resources?.test_tool?.file_ids ?? [];
expect(fileIds).toHaveLength(expectedLength);
if (expectedContains) {
@@ -356,27 +280,27 @@ describe('Agent Methods', () => {
await addAgentResourceFile({
agent_id: agent.id,
- tool_resource: EToolResources.execute_code,
+ tool_resource: 'test_tool',
file_id: fileId,
});
- const operations: Promise[] = [
+ const operations = [
addAgentResourceFile({
agent_id: agent.id,
- tool_resource: EToolResources.execute_code,
+ tool_resource: 'test_tool',
file_id: fileId,
}),
removeAgentResourceFiles({
agent_id: agent.id,
- files: [{ tool_resource: EToolResources.execute_code, file_id: fileId }],
+ files: [{ tool_resource: 'test_tool', file_id: fileId }],
}),
];
await Promise.all(operations);
const updatedAgent = await Agent.findOne({ id: agent.id });
- const finalFileIds = updatedAgent!.tool_resources![EToolResources.execute_code]!.file_ids!;
- const count = finalFileIds.filter((id: string) => id === fileId).length;
+ const finalFileIds = updatedAgent.tool_resources.test_tool.file_ids;
+ const count = finalFileIds.filter((id) => id === fileId).length;
expect(count).toBeLessThanOrEqual(1);
if (count === 0) {
@@ -396,7 +320,7 @@ describe('Agent Methods', () => {
fileIds.map((fileId) =>
addAgentResourceFile({
agent_id: agent.id,
- tool_resource: EToolResources.execute_code,
+ tool_resource: 'test_tool',
file_id: fileId,
}),
),
@@ -406,7 +330,7 @@ describe('Agent Methods', () => {
const removalPromises = fileIds.map((fileId) =>
removeAgentResourceFiles({
agent_id: agent.id,
- files: [{ tool_resource: EToolResources.execute_code, file_id: fileId }],
+ files: [{ tool_resource: 'test_tool', file_id: fileId }],
}),
);
@@ -414,8 +338,7 @@ describe('Agent Methods', () => {
const updatedAgent = await Agent.findOne({ id: agent.id });
// Check if the array is empty or the tool resource itself is removed
- const finalFileIds =
- updatedAgent?.tool_resources?.[EToolResources.execute_code]?.file_ids ?? [];
+ const finalFileIds = updatedAgent.tool_resources?.test_tool?.file_ids ?? [];
expect(finalFileIds).toHaveLength(0);
});
@@ -439,7 +362,7 @@ describe('Agent Methods', () => {
])('addAgentResourceFile with $name', ({ needsAgent, params, shouldResolve, error }) => {
test(`should ${shouldResolve ? 'resolve' : 'reject'}`, async () => {
const agent = needsAgent ? await createBasicAgent() : null;
- const agent_id = needsAgent ? agent!.id : `agent_${uuidv4()}`;
+ const agent_id = needsAgent ? agent.id : `agent_${uuidv4()}`;
if (shouldResolve) {
await expect(addAgentResourceFile({ agent_id, ...params })).resolves.toBeDefined();
@@ -452,7 +375,7 @@ describe('Agent Methods', () => {
describe.each([
{
name: 'empty files array',
- files: [] as { tool_resource: string; file_id: string }[],
+ files: [],
needsAgent: true,
shouldResolve: true,
},
@@ -472,7 +395,7 @@ describe('Agent Methods', () => {
])('removeAgentResourceFiles with $name', ({ files, needsAgent, shouldResolve, error }) => {
test(`should ${shouldResolve ? 'resolve' : 'reject'}`, async () => {
const agent = needsAgent ? await createBasicAgent() : null;
- const agent_id = needsAgent ? agent!.id : `agent_${uuidv4()}`;
+ const agent_id = needsAgent ? agent.id : `agent_${uuidv4()}`;
if (shouldResolve) {
const result = await removeAgentResourceFiles({ agent_id, files });
@@ -489,10 +412,36 @@ describe('Agent Methods', () => {
});
describe('Agent CRUD Operations', () => {
+ let mongoServer;
+ let AccessRole;
+
+ beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
+ await mongoose.connect(mongoUri);
+
+ // Initialize models
+ const dbModels = require('~/db/models');
+ AccessRole = dbModels.AccessRole;
+
+ // Create necessary access roles for agents
+ await AccessRole.create({
+ accessRoleId: AccessRoleIds.AGENT_OWNER,
+ name: 'Owner',
+ description: 'Full control over agents',
+ resourceType: ResourceType.AGENT,
+ permBits: 15, // VIEW | EDIT | DELETE | SHARE
+ });
+ }, 20000);
+
+ afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ });
+
beforeEach(async () => {
await Agent.deleteMany({});
- await User.deleteMany({});
- await AclEntry.deleteMany({});
});
test('should create and get an agent', async () => {
@@ -513,9 +462,9 @@ describe('Agent Methods', () => {
const retrievedAgent = await getAgent({ id: agentId });
expect(retrievedAgent).toBeDefined();
- expect(retrievedAgent!.id).toBe(agentId);
- expect(retrievedAgent!.name).toBe('Test Agent');
- expect(retrievedAgent!.description).toBe('Test description');
+ expect(retrievedAgent.id).toBe(agentId);
+ expect(retrievedAgent.name).toBe('Test Agent');
+ expect(retrievedAgent.description).toBe('Test description');
});
test('should delete an agent', async () => {
@@ -553,9 +502,8 @@ describe('Agent Methods', () => {
});
// Grant permissions (simulating sharing)
- await AclEntry.create({
+ await permissionService.grantPermission({
principalType: PrincipalType.USER,
- principalModel: PrincipalModel.USER,
principalId: authorId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
@@ -617,15 +565,15 @@ describe('Agent Methods', () => {
// Verify edge exists before deletion
const sourceAgentBefore = await getAgent({ id: sourceAgentId });
- expect(sourceAgentBefore!.edges).toHaveLength(1);
- expect(sourceAgentBefore!.edges![0].to).toBe(targetAgentId);
+ expect(sourceAgentBefore.edges).toHaveLength(1);
+ expect(sourceAgentBefore.edges[0].to).toBe(targetAgentId);
// Delete the target agent
await deleteAgent({ id: targetAgentId });
// Verify the edge is removed from source agent
const sourceAgentAfter = await getAgent({ id: sourceAgentId });
- expect(sourceAgentAfter!.edges).toHaveLength(0);
+ expect(sourceAgentAfter.edges).toHaveLength(0);
});
test('should remove agent from user favorites when agent is deleted', async () => {
@@ -653,10 +601,8 @@ describe('Agent Methods', () => {
// Verify user has agent in favorites
const userBefore = await User.findById(userId);
- expect(userBefore!.favorites).toHaveLength(2);
- expect(
- userBefore!.favorites!.some((f: Record) => f.agentId === agentId),
- ).toBe(true);
+ expect(userBefore.favorites).toHaveLength(2);
+ expect(userBefore.favorites.some((f) => f.agentId === agentId)).toBe(true);
// Delete the agent
await deleteAgent({ id: agentId });
@@ -667,13 +613,9 @@ describe('Agent Methods', () => {
// Verify agent is removed from user favorites
const userAfter = await User.findById(userId);
- expect(userAfter!.favorites).toHaveLength(1);
- expect(
- userAfter!.favorites!.some((f: Record) => f.agentId === agentId),
- ).toBe(false);
- expect(userAfter!.favorites!.some((f: Record) => f.model === 'gpt-4')).toBe(
- true,
- );
+ expect(userAfter.favorites).toHaveLength(1);
+ expect(userAfter.favorites.some((f) => f.agentId === agentId)).toBe(false);
+ expect(userAfter.favorites.some((f) => f.model === 'gpt-4')).toBe(true);
});
test('should remove agent from multiple users favorites when agent is deleted', async () => {
@@ -715,11 +657,9 @@ describe('Agent Methods', () => {
const user1After = await User.findById(user1Id);
const user2After = await User.findById(user2Id);
- expect(user1After!.favorites).toHaveLength(0);
- expect(user2After!.favorites).toHaveLength(1);
- expect(
- user2After!.favorites!.some((f: Record) => f.agentId === agentId),
- ).toBe(false);
+ expect(user1After.favorites).toHaveLength(0);
+ expect(user2After.favorites).toHaveLength(1);
+ expect(user2After.favorites.some((f) => f.agentId === agentId)).toBe(false);
});
test('should preserve other agents in database when one agent is deleted', async () => {
@@ -766,9 +706,9 @@ describe('Agent Methods', () => {
const keptAgent1 = await getAgent({ id: agentToKeep1Id });
const keptAgent2 = await getAgent({ id: agentToKeep2Id });
expect(keptAgent1).not.toBeNull();
- expect(keptAgent1!.name).toBe('Agent To Keep 1');
+ expect(keptAgent1.name).toBe('Agent To Keep 1');
expect(keptAgent2).not.toBeNull();
- expect(keptAgent2!.name).toBe('Agent To Keep 2');
+ expect(keptAgent2.name).toBe('Agent To Keep 2');
});
test('should preserve other agents in user favorites when one agent is deleted', async () => {
@@ -818,23 +758,17 @@ describe('Agent Methods', () => {
// Verify user has all three agents in favorites
const userBefore = await User.findById(userId);
- expect(userBefore!.favorites).toHaveLength(3);
+ expect(userBefore.favorites).toHaveLength(3);
// Delete one agent
await deleteAgent({ id: agentToDeleteId });
// Verify only the deleted agent is removed from favorites
const userAfter = await User.findById(userId);
- expect(userAfter!.favorites).toHaveLength(2);
- expect(
- userAfter!.favorites?.some((f: Record) => f.agentId === agentToDeleteId),
- ).toBe(false);
- expect(
- userAfter!.favorites?.some((f: Record) => f.agentId === agentToKeep1Id),
- ).toBe(true);
- expect(
- userAfter!.favorites?.some((f: Record) => f.agentId === agentToKeep2Id),
- ).toBe(true);
+ expect(userAfter.favorites).toHaveLength(2);
+ expect(userAfter.favorites.some((f) => f.agentId === agentToDeleteId)).toBe(false);
+ expect(userAfter.favorites.some((f) => f.agentId === agentToKeep1Id)).toBe(true);
+ expect(userAfter.favorites.some((f) => f.agentId === agentToKeep2Id)).toBe(true);
});
test('should not affect users who do not have deleted agent in favorites', async () => {
@@ -884,27 +818,15 @@ describe('Agent Methods', () => {
// Verify user with deleted agent has it removed
const userWithDeleted = await User.findById(userWithDeletedAgentId);
- expect(userWithDeleted!.favorites).toHaveLength(1);
- expect(
- userWithDeleted!.favorites!.some(
- (f: Record) => f.agentId === agentToDeleteId,
- ),
- ).toBe(false);
- expect(
- userWithDeleted!.favorites!.some((f: Record) => f.model === 'gpt-4'),
- ).toBe(true);
+ expect(userWithDeleted.favorites).toHaveLength(1);
+ expect(userWithDeleted.favorites.some((f) => f.agentId === agentToDeleteId)).toBe(false);
+ expect(userWithDeleted.favorites.some((f) => f.model === 'gpt-4')).toBe(true);
// Verify user without deleted agent is completely unaffected
const userWithoutDeleted = await User.findById(userWithoutDeletedAgentId);
- expect(userWithoutDeleted!.favorites).toHaveLength(2);
- expect(
- userWithoutDeleted!.favorites!.some(
- (f: Record) => f.agentId === otherAgentId,
- ),
- ).toBe(true);
- expect(
- userWithoutDeleted!.favorites!.some((f: Record) => f.model === 'claude-3'),
- ).toBe(true);
+ expect(userWithoutDeleted.favorites).toHaveLength(2);
+ expect(userWithoutDeleted.favorites.some((f) => f.agentId === otherAgentId)).toBe(true);
+ expect(userWithoutDeleted.favorites.some((f) => f.model === 'claude-3')).toBe(true);
});
test('should remove all user agents from favorites when deleteUserAgents is called', async () => {
@@ -916,7 +838,8 @@ describe('Agent Methods', () => {
const agent2Id = `agent_${uuidv4()}`;
const otherAuthorAgentId = `agent_${uuidv4()}`;
- const agent1 = await createAgent({
+ // Create agents by the author to be deleted
+ await createAgent({
id: agent1Id,
name: 'Author Agent 1',
provider: 'test',
@@ -924,7 +847,7 @@ describe('Agent Methods', () => {
author: authorId,
});
- const agent2 = await createAgent({
+ await createAgent({
id: agent2Id,
name: 'Author Agent 2',
provider: 'test',
@@ -932,6 +855,7 @@ describe('Agent Methods', () => {
author: authorId,
});
+ // Create agent by different author (should not be deleted)
await createAgent({
id: otherAuthorAgentId,
name: 'Other Author Agent',
@@ -940,29 +864,7 @@ describe('Agent Methods', () => {
author: otherAuthorId,
});
- const ownerBits =
- PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE;
- await AclEntry.create({
- principalType: PrincipalType.USER,
- principalId: authorId,
- principalModel: PrincipalModel.USER,
- resourceType: ResourceType.AGENT,
- resourceId: agent1._id,
- permBits: ownerBits,
- grantedBy: authorId,
- grantedAt: new Date(),
- });
- await AclEntry.create({
- principalType: PrincipalType.USER,
- principalId: authorId,
- principalModel: PrincipalModel.USER,
- resourceType: ResourceType.AGENT,
- resourceId: agent2._id,
- permBits: ownerBits,
- grantedBy: authorId,
- grantedAt: new Date(),
- });
-
+ // Create user with all agents in favorites
await User.create({
_id: userId,
name: 'Test User',
@@ -976,32 +878,27 @@ describe('Agent Methods', () => {
],
});
+ // Verify user has all favorites
const userBefore = await User.findById(userId);
- expect(userBefore!.favorites).toHaveLength(4);
+ expect(userBefore.favorites).toHaveLength(4);
+ // Delete all agents by the author
await deleteUserAgents(authorId.toString());
+ // Verify author's agents are deleted from database
expect(await getAgent({ id: agent1Id })).toBeNull();
expect(await getAgent({ id: agent2Id })).toBeNull();
+ // Verify other author's agent still exists
expect(await getAgent({ id: otherAuthorAgentId })).not.toBeNull();
+ // Verify user favorites: author's agents removed, others remain
const userAfter = await User.findById(userId);
- expect(userAfter!.favorites).toHaveLength(2);
- expect(
- userAfter!.favorites!.some((f: Record) => f.agentId === agent1Id),
- ).toBe(false);
- expect(
- userAfter!.favorites!.some((f: Record) => f.agentId === agent2Id),
- ).toBe(false);
- expect(
- userAfter!.favorites!.some(
- (f: Record) => f.agentId === otherAuthorAgentId,
- ),
- ).toBe(true);
- expect(userAfter!.favorites!.some((f: Record) => f.model === 'gpt-4')).toBe(
- true,
- );
+ expect(userAfter.favorites).toHaveLength(2);
+ expect(userAfter.favorites.some((f) => f.agentId === agent1Id)).toBe(false);
+ expect(userAfter.favorites.some((f) => f.agentId === agent2Id)).toBe(false);
+ expect(userAfter.favorites.some((f) => f.agentId === otherAuthorAgentId)).toBe(true);
+ expect(userAfter.favorites.some((f) => f.model === 'gpt-4')).toBe(true);
});
test('should handle deleteUserAgents when agents are in multiple users favorites', async () => {
@@ -1014,7 +911,8 @@ describe('Agent Methods', () => {
const agent2Id = `agent_${uuidv4()}`;
const unrelatedAgentId = `agent_${uuidv4()}`;
- const agent1 = await createAgent({
+ // Create agents by the author
+ await createAgent({
id: agent1Id,
name: 'Author Agent 1',
provider: 'test',
@@ -1022,7 +920,7 @@ describe('Agent Methods', () => {
author: authorId,
});
- const agent2 = await createAgent({
+ await createAgent({
id: agent2Id,
name: 'Author Agent 2',
provider: 'test',
@@ -1030,29 +928,7 @@ describe('Agent Methods', () => {
author: authorId,
});
- const ownerBits =
- PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE;
- await AclEntry.create({
- principalType: PrincipalType.USER,
- principalId: authorId,
- principalModel: PrincipalModel.USER,
- resourceType: ResourceType.AGENT,
- resourceId: agent1._id,
- permBits: ownerBits,
- grantedBy: authorId,
- grantedAt: new Date(),
- });
- await AclEntry.create({
- principalType: PrincipalType.USER,
- principalId: authorId,
- principalModel: PrincipalModel.USER,
- resourceType: ResourceType.AGENT,
- resourceId: agent2._id,
- permBits: ownerBits,
- grantedBy: authorId,
- grantedAt: new Date(),
- });
-
+ // Create users with various favorites configurations
await User.create({
_id: user1Id,
name: 'User 1',
@@ -1077,28 +953,23 @@ describe('Agent Methods', () => {
favorites: [{ agentId: unrelatedAgentId }, { model: 'gpt-4', endpoint: 'openAI' }],
});
+ // Delete all agents by the author
await deleteUserAgents(authorId.toString());
+ // Verify all users' favorites are correctly updated
const user1After = await User.findById(user1Id);
- expect(user1After!.favorites).toHaveLength(0);
+ expect(user1After.favorites).toHaveLength(0);
const user2After = await User.findById(user2Id);
- expect(user2After!.favorites).toHaveLength(1);
- expect(
- user2After!.favorites!.some((f: Record) => f.agentId === agent1Id),
- ).toBe(false);
- expect(
- user2After!.favorites!.some((f: Record) => f.model === 'claude-3'),
- ).toBe(true);
+ expect(user2After.favorites).toHaveLength(1);
+ expect(user2After.favorites.some((f) => f.agentId === agent1Id)).toBe(false);
+ expect(user2After.favorites.some((f) => f.model === 'claude-3')).toBe(true);
+ // User 3 should be completely unaffected
const user3After = await User.findById(user3Id);
- expect(user3After!.favorites).toHaveLength(2);
- expect(
- user3After!.favorites!.some((f: Record) => f.agentId === unrelatedAgentId),
- ).toBe(true);
- expect(user3After!.favorites!.some((f: Record) => f.model === 'gpt-4')).toBe(
- true,
- );
+ expect(user3After.favorites).toHaveLength(2);
+ expect(user3After.favorites.some((f) => f.agentId === unrelatedAgentId)).toBe(true);
+ expect(user3After.favorites.some((f) => f.model === 'gpt-4')).toBe(true);
});
test('should handle deleteUserAgents when user has no agents', async () => {
@@ -1108,7 +979,8 @@ describe('Agent Methods', () => {
const existingAgentId = `agent_${uuidv4()}`;
- const existingAgent = await createAgent({
+ // Create agent by different author
+ await createAgent({
id: existingAgentId,
name: 'Existing Agent',
provider: 'test',
@@ -1116,19 +988,7 @@ describe('Agent Methods', () => {
author: otherAuthorId,
});
- const ownerBits =
- PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE;
- await AclEntry.create({
- principalType: PrincipalType.USER,
- principalId: otherAuthorId,
- principalModel: PrincipalModel.USER,
- resourceType: ResourceType.AGENT,
- resourceId: existingAgent._id,
- permBits: ownerBits,
- grantedBy: otherAuthorId,
- grantedAt: new Date(),
- });
-
+ // Create user with favorites
await User.create({
_id: userId,
name: 'Test User',
@@ -1137,18 +997,17 @@ describe('Agent Methods', () => {
favorites: [{ agentId: existingAgentId }, { model: 'gpt-4', endpoint: 'openAI' }],
});
+ // Delete agents for user with no agents (should be a no-op)
await deleteUserAgents(authorWithNoAgentsId.toString());
+ // Verify existing agent still exists
expect(await getAgent({ id: existingAgentId })).not.toBeNull();
+ // Verify user favorites are unchanged
const userAfter = await User.findById(userId);
- expect(userAfter!.favorites).toHaveLength(2);
- expect(
- userAfter!.favorites!.some((f: Record) => f.agentId === existingAgentId),
- ).toBe(true);
- expect(userAfter!.favorites!.some((f: Record) => f.model === 'gpt-4')).toBe(
- true,
- );
+ expect(userAfter.favorites).toHaveLength(2);
+ expect(userAfter.favorites.some((f) => f.agentId === existingAgentId)).toBe(true);
+ expect(userAfter.favorites.some((f) => f.model === 'gpt-4')).toBe(true);
});
test('should handle deleteUserAgents when agents are not in any favorites', async () => {
@@ -1158,7 +1017,8 @@ describe('Agent Methods', () => {
const agent1Id = `agent_${uuidv4()}`;
const agent2Id = `agent_${uuidv4()}`;
- const agent1 = await createAgent({
+ // Create agents by the author
+ await createAgent({
id: agent1Id,
name: 'Agent 1',
provider: 'test',
@@ -1166,7 +1026,7 @@ describe('Agent Methods', () => {
author: authorId,
});
- const agent2 = await createAgent({
+ await createAgent({
id: agent2Id,
name: 'Agent 2',
provider: 'test',
@@ -1174,29 +1034,7 @@ describe('Agent Methods', () => {
author: authorId,
});
- const ownerBits =
- PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE;
- await AclEntry.create({
- principalType: PrincipalType.USER,
- principalId: authorId,
- principalModel: PrincipalModel.USER,
- resourceType: ResourceType.AGENT,
- resourceId: agent1._id,
- permBits: ownerBits,
- grantedBy: authorId,
- grantedAt: new Date(),
- });
- await AclEntry.create({
- principalType: PrincipalType.USER,
- principalId: authorId,
- principalModel: PrincipalModel.USER,
- resourceType: ResourceType.AGENT,
- resourceId: agent2._id,
- permBits: ownerBits,
- grantedBy: authorId,
- grantedAt: new Date(),
- });
-
+ // Create user with favorites that don't include these agents
await User.create({
_id: userId,
name: 'Test User',
@@ -1205,119 +1043,130 @@ describe('Agent Methods', () => {
favorites: [{ model: 'gpt-4', endpoint: 'openAI' }],
});
+ // Verify agents exist
expect(await getAgent({ id: agent1Id })).not.toBeNull();
expect(await getAgent({ id: agent2Id })).not.toBeNull();
+ // Delete all agents by the author
await deleteUserAgents(authorId.toString());
+ // Verify agents are deleted
expect(await getAgent({ id: agent1Id })).toBeNull();
expect(await getAgent({ id: agent2Id })).toBeNull();
+ // Verify user favorites are unchanged
const userAfter = await User.findById(userId);
- expect(userAfter!.favorites).toHaveLength(1);
- expect(userAfter!.favorites!.some((f: Record) => f.model === 'gpt-4')).toBe(
- true,
- );
+ expect(userAfter.favorites).toHaveLength(1);
+ expect(userAfter.favorites.some((f) => f.model === 'gpt-4')).toBe(true);
});
- test('should preserve multi-owned agents when deleteUserAgents is called', async () => {
- const deletingUserId = new mongoose.Types.ObjectId();
- const otherOwnerId = new mongoose.Types.ObjectId();
-
- const soleOwnedId = `agent_${uuidv4()}`;
- const multiOwnedId = `agent_${uuidv4()}`;
-
- const soleAgent = await createAgent({
- id: soleOwnedId,
- name: 'Sole Owned Agent',
- provider: 'test',
- model: 'test-model',
- author: deletingUserId,
- });
-
- const multiAgent = await createAgent({
- id: multiOwnedId,
- name: 'Multi Owned Agent',
- provider: 'test',
- model: 'test-model',
- author: deletingUserId,
- });
-
- await AclEntry.create({
- principalType: PrincipalType.USER,
- principalId: deletingUserId,
- principalModel: PrincipalModel.USER,
- resourceType: ResourceType.AGENT,
- resourceId: (soleAgent as unknown as { _id: mongoose.Types.ObjectId })._id,
- permBits: PermissionBits.DELETE | PermissionBits.VIEW | PermissionBits.EDIT,
- });
-
- await AclEntry.create({
- principalType: PrincipalType.USER,
- principalId: deletingUserId,
- principalModel: PrincipalModel.USER,
- resourceType: ResourceType.AGENT,
- resourceId: (multiAgent as unknown as { _id: mongoose.Types.ObjectId })._id,
- permBits: PermissionBits.DELETE | PermissionBits.VIEW | PermissionBits.EDIT,
- });
- await AclEntry.create({
- principalType: PrincipalType.USER,
- principalId: otherOwnerId,
- principalModel: PrincipalModel.USER,
- resourceType: ResourceType.AGENT,
- resourceId: (multiAgent as unknown as { _id: mongoose.Types.ObjectId })._id,
- permBits: PermissionBits.DELETE | PermissionBits.VIEW | PermissionBits.EDIT,
- });
-
- await deleteUserAgents(deletingUserId.toString());
-
- expect(await getAgent({ id: soleOwnedId })).toBeNull();
- expect(await getAgent({ id: multiOwnedId })).not.toBeNull();
-
- const soleAcl = await AclEntry.find({
- resourceType: ResourceType.AGENT,
- resourceId: (soleAgent as unknown as { _id: mongoose.Types.ObjectId })._id,
- });
- expect(soleAcl).toHaveLength(0);
-
- const multiAcl = await AclEntry.find({
- resourceType: ResourceType.AGENT,
- resourceId: (multiAgent as unknown as { _id: mongoose.Types.ObjectId })._id,
- principalId: otherOwnerId,
- });
- expect(multiAcl).toHaveLength(1);
- expect(multiAcl[0].permBits & PermissionBits.DELETE).toBeTruthy();
-
- const deletingUserMultiAcl = await AclEntry.find({
- resourceType: ResourceType.AGENT,
- resourceId: (multiAgent as unknown as { _id: mongoose.Types.ObjectId })._id,
- principalId: deletingUserId,
- });
- expect(deletingUserMultiAcl).toHaveLength(1);
- });
-
- test('should delete legacy agents that have author but no ACL entries', async () => {
- const legacyUserId = new mongoose.Types.ObjectId();
- const legacyAgentId = `agent_${uuidv4()}`;
+ test('should update agent projects', async () => {
+ const agentId = `agent_${uuidv4()}`;
+ const authorId = new mongoose.Types.ObjectId();
+ const projectId1 = new mongoose.Types.ObjectId();
+ const projectId2 = new mongoose.Types.ObjectId();
+ const projectId3 = new mongoose.Types.ObjectId();
await createAgent({
- id: legacyAgentId,
- name: 'Legacy Agent (no ACL)',
+ id: agentId,
+ name: 'Project Test Agent',
provider: 'test',
model: 'test-model',
- author: legacyUserId,
+ author: authorId,
+ projectIds: [projectId1],
});
- await deleteUserAgents(legacyUserId.toString());
+ await updateAgent(
+ { id: agentId },
+ { $addToSet: { projectIds: { $each: [projectId2, projectId3] } } },
+ );
- expect(await getAgent({ id: legacyAgentId })).toBeNull();
+ await updateAgent({ id: agentId }, { $pull: { projectIds: projectId1 } });
+
+ await updateAgent({ id: agentId }, { projectIds: [projectId2, projectId3] });
+
+ const updatedAgent = await getAgent({ id: agentId });
+ expect(updatedAgent.projectIds).toHaveLength(2);
+ expect(updatedAgent.projectIds.map((id) => id.toString())).toContain(projectId2.toString());
+ expect(updatedAgent.projectIds.map((id) => id.toString())).toContain(projectId3.toString());
+ expect(updatedAgent.projectIds.map((id) => id.toString())).not.toContain(
+ projectId1.toString(),
+ );
+
+ await updateAgent({ id: agentId }, { projectIds: [] });
+
+ const emptyProjectsAgent = await getAgent({ id: agentId });
+ expect(emptyProjectsAgent.projectIds).toHaveLength(0);
+
+ const nonExistentId = `agent_${uuidv4()}`;
+ await expect(
+ updateAgentProjects({
+ id: nonExistentId,
+ projectIds: [projectId1],
+ }),
+ ).rejects.toThrow();
+ });
+
+ test('should handle ephemeral agent loading', async () => {
+ const agentId = 'ephemeral_test';
+ const endpoint = 'openai';
+
+ const originalModule = jest.requireActual('librechat-data-provider');
+
+ const mockDataProvider = {
+ ...originalModule,
+ Constants: {
+ ...originalModule.Constants,
+ EPHEMERAL_AGENT_ID: 'ephemeral_test',
+ },
+ };
+
+ jest.doMock('librechat-data-provider', () => mockDataProvider);
+
+ expect(agentId).toBeDefined();
+ expect(endpoint).toBeDefined();
+
+ jest.dontMock('librechat-data-provider');
+ });
+
+ test('should handle loadAgent functionality and errors', async () => {
+ const agentId = `agent_${uuidv4()}`;
+ const authorId = new mongoose.Types.ObjectId();
+
+ await createAgent({
+ id: agentId,
+ name: 'Test Load Agent',
+ provider: 'test',
+ model: 'test-model',
+ author: authorId,
+ tools: ['tool1', 'tool2'],
+ });
+
+ const agent = await getAgent({ id: agentId });
+
+ expect(agent).toBeDefined();
+ expect(agent.id).toBe(agentId);
+ expect(agent.name).toBe('Test Load Agent');
+ expect(agent.tools).toEqual(expect.arrayContaining(['tool1', 'tool2']));
+
+ const mockLoadAgent = jest.fn().mockResolvedValue(agent);
+ const loadedAgent = await mockLoadAgent();
+ expect(loadedAgent).toBeDefined();
+ expect(loadedAgent.id).toBe(agentId);
+
+ const nonExistentId = `agent_${uuidv4()}`;
+ const nonExistentAgent = await getAgent({ id: nonExistentId });
+ expect(nonExistentAgent).toBeNull();
+
+ const mockLoadAgentError = jest.fn().mockRejectedValue(new Error('No agent found with ID'));
+ await expect(mockLoadAgentError()).rejects.toThrow('No agent found with ID');
});
describe('Edge Cases', () => {
test.each([
{
name: 'getAgent with undefined search parameters',
- fn: () => getAgent(undefined as unknown as Parameters[0]),
+ fn: () => getAgent(undefined),
expected: null,
},
{
@@ -1329,10 +1178,38 @@ describe('Agent Methods', () => {
const result = await fn();
expect(result).toBe(expected);
});
+
+ test('should handle updateAgentProjects with non-existent agent', async () => {
+ const nonExistentId = `agent_${uuidv4()}`;
+ const userId = new mongoose.Types.ObjectId();
+ const projectId = new mongoose.Types.ObjectId();
+
+ const result = await updateAgentProjects({
+ user: { id: userId.toString() },
+ agentId: nonExistentId,
+ projectIds: [projectId.toString()],
+ });
+
+ expect(result).toBeNull();
+ });
});
});
describe('Agent Version History', () => {
+ let mongoServer;
+
+ beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
+ await mongoose.connect(mongoUri);
+ }, 20000);
+
+ afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ });
+
beforeEach(async () => {
await Agent.deleteMany({});
});
@@ -1340,12 +1217,12 @@ describe('Agent Methods', () => {
test('should create an agent with a single entry in versions array', async () => {
const agent = await createBasicAgent();
- expect(agent!.versions).toBeDefined();
+ expect(agent.versions).toBeDefined();
expect(Array.isArray(agent.versions)).toBe(true);
- expect(agent!.versions).toHaveLength(1);
- expect(agent!.versions![0].name).toBe('Test Agent');
- expect(agent!.versions![0].provider).toBe('test');
- expect(agent!.versions![0].model).toBe('test-model');
+ expect(agent.versions).toHaveLength(1);
+ expect(agent.versions[0].name).toBe('Test Agent');
+ expect(agent.versions[0].provider).toBe('test');
+ expect(agent.versions[0].model).toBe('test-model');
});
test('should accumulate version history across multiple updates', async () => {
@@ -1367,29 +1244,29 @@ describe('Agent Methods', () => {
await updateAgent({ id: agentId }, { name: 'Third Name', model: 'new-model' });
const finalAgent = await updateAgent({ id: agentId }, { description: 'Final description' });
- expect(finalAgent!.versions).toBeDefined();
- expect(Array.isArray(finalAgent!.versions)).toBe(true);
- expect(finalAgent!.versions).toHaveLength(4);
+ expect(finalAgent.versions).toBeDefined();
+ expect(Array.isArray(finalAgent.versions)).toBe(true);
+ expect(finalAgent.versions).toHaveLength(4);
- expect(finalAgent!.versions![0].name).toBe('First Name');
- expect(finalAgent!.versions![0].description).toBe('First description');
- expect(finalAgent!.versions![0].model).toBe('test-model');
+ expect(finalAgent.versions[0].name).toBe('First Name');
+ expect(finalAgent.versions[0].description).toBe('First description');
+ expect(finalAgent.versions[0].model).toBe('test-model');
- expect(finalAgent!.versions![1].name).toBe('Second Name');
- expect(finalAgent!.versions![1].description).toBe('Second description');
- expect(finalAgent!.versions![1].model).toBe('test-model');
+ expect(finalAgent.versions[1].name).toBe('Second Name');
+ expect(finalAgent.versions[1].description).toBe('Second description');
+ expect(finalAgent.versions[1].model).toBe('test-model');
- expect(finalAgent!.versions![2].name).toBe('Third Name');
- expect(finalAgent!.versions![2].description).toBe('Second description');
- expect(finalAgent!.versions![2].model).toBe('new-model');
+ expect(finalAgent.versions[2].name).toBe('Third Name');
+ expect(finalAgent.versions[2].description).toBe('Second description');
+ expect(finalAgent.versions[2].model).toBe('new-model');
- expect(finalAgent!.versions![3].name).toBe('Third Name');
- expect(finalAgent!.versions![3].description).toBe('Final description');
- expect(finalAgent!.versions![3].model).toBe('new-model');
+ expect(finalAgent.versions[3].name).toBe('Third Name');
+ expect(finalAgent.versions[3].description).toBe('Final description');
+ expect(finalAgent.versions[3].model).toBe('new-model');
- expect(finalAgent!.name).toBe('Third Name');
- expect(finalAgent!.description).toBe('Final description');
- expect(finalAgent!.model).toBe('new-model');
+ expect(finalAgent.name).toBe('Third Name');
+ expect(finalAgent.description).toBe('Final description');
+ expect(finalAgent.model).toBe('new-model');
});
test('should not include metadata fields in version history', async () => {
@@ -1404,14 +1281,14 @@ describe('Agent Methods', () => {
const updatedAgent = await updateAgent({ id: agentId }, { description: 'New description' });
- expect(updatedAgent!.versions).toHaveLength(2);
- expect(updatedAgent!.versions![0]._id).toBeUndefined();
- expect((updatedAgent!.versions![0] as VersionEntry).__v).toBeUndefined();
- expect(updatedAgent!.versions![0].name).toBe('Test Agent');
- expect(updatedAgent!.versions![0].author).toBeUndefined();
+ expect(updatedAgent.versions).toHaveLength(2);
+ expect(updatedAgent.versions[0]._id).toBeUndefined();
+ expect(updatedAgent.versions[0].__v).toBeUndefined();
+ expect(updatedAgent.versions[0].name).toBe('Test Agent');
+ expect(updatedAgent.versions[0].author).toBeUndefined();
- expect(updatedAgent!.versions![1]._id).toBeUndefined();
- expect((updatedAgent!.versions![1] as VersionEntry).__v).toBeUndefined();
+ expect(updatedAgent.versions[1]._id).toBeUndefined();
+ expect(updatedAgent.versions[1].__v).toBeUndefined();
});
test('should not recursively include previous versions', async () => {
@@ -1428,16 +1305,17 @@ describe('Agent Methods', () => {
await updateAgent({ id: agentId }, { name: 'Updated Name 2' });
const finalAgent = await updateAgent({ id: agentId }, { name: 'Updated Name 3' });
- expect(finalAgent!.versions).toHaveLength(4);
+ expect(finalAgent.versions).toHaveLength(4);
- finalAgent!.versions!.forEach((version) => {
- expect((version as VersionEntry).versions).toBeUndefined();
+ finalAgent.versions.forEach((version) => {
+ expect(version.versions).toBeUndefined();
});
});
test('should handle MongoDB operators and field updates correctly', async () => {
const agentId = `agent_${uuidv4()}`;
const authorId = new mongoose.Types.ObjectId();
+ const projectId = new mongoose.Types.ObjectId();
await createAgent({
id: agentId,
@@ -1453,14 +1331,16 @@ describe('Agent Methods', () => {
{
description: 'Updated description',
$push: { tools: 'tool2' },
+ $addToSet: { projectIds: projectId },
},
);
const firstUpdate = await getAgent({ id: agentId });
- expect(firstUpdate!.description).toBe('Updated description');
- expect(firstUpdate!.tools).toContain('tool1');
- expect(firstUpdate!.tools).toContain('tool2');
- expect(firstUpdate!.versions).toHaveLength(2);
+ expect(firstUpdate.description).toBe('Updated description');
+ expect(firstUpdate.tools).toContain('tool1');
+ expect(firstUpdate.tools).toContain('tool2');
+ expect(firstUpdate.projectIds.map((id) => id.toString())).toContain(projectId.toString());
+ expect(firstUpdate.versions).toHaveLength(2);
await updateAgent(
{ id: agentId },
@@ -1470,11 +1350,11 @@ describe('Agent Methods', () => {
);
const secondUpdate = await getAgent({ id: agentId });
- expect(secondUpdate!.tools).toHaveLength(2);
- expect(secondUpdate!.tools).toContain('tool2');
- expect(secondUpdate!.tools).toContain('tool3');
- expect(secondUpdate!.tools).not.toContain('tool1');
- expect(secondUpdate!.versions).toHaveLength(3);
+ expect(secondUpdate.tools).toHaveLength(2);
+ expect(secondUpdate.tools).toContain('tool2');
+ expect(secondUpdate.tools).toContain('tool3');
+ expect(secondUpdate.tools).not.toContain('tool1');
+ expect(secondUpdate.versions).toHaveLength(3);
await updateAgent(
{ id: agentId },
@@ -1484,9 +1364,9 @@ describe('Agent Methods', () => {
);
const thirdUpdate = await getAgent({ id: agentId });
- const toolCount = thirdUpdate!.tools!.filter((t) => t === 'tool3').length;
+ const toolCount = thirdUpdate.tools.filter((t) => t === 'tool3').length;
expect(toolCount).toBe(2);
- expect(thirdUpdate!.versions).toHaveLength(4);
+ expect(thirdUpdate.versions).toHaveLength(4);
});
test('should handle parameter objects correctly', async () => {
@@ -1507,8 +1387,8 @@ describe('Agent Methods', () => {
{ model_parameters: { temperature: 0.8 } },
);
- expect(updatedAgent!.versions).toHaveLength(2);
- expect(updatedAgent!.model_parameters?.temperature).toBe(0.8);
+ expect(updatedAgent.versions).toHaveLength(2);
+ expect(updatedAgent.model_parameters.temperature).toBe(0.8);
await updateAgent(
{ id: agentId },
@@ -1521,15 +1401,15 @@ describe('Agent Methods', () => {
);
const complexAgent = await getAgent({ id: agentId });
- expect(complexAgent!.versions).toHaveLength(3);
- expect(complexAgent!.model_parameters?.temperature).toBe(0.8);
- expect(complexAgent!.model_parameters?.max_tokens).toBe(1000);
+ expect(complexAgent.versions).toHaveLength(3);
+ expect(complexAgent.model_parameters.temperature).toBe(0.8);
+ expect(complexAgent.model_parameters.max_tokens).toBe(1000);
await updateAgent({ id: agentId }, { model_parameters: {} });
const emptyParamsAgent = await getAgent({ id: agentId });
- expect(emptyParamsAgent!.versions).toHaveLength(4);
- expect(emptyParamsAgent!.model_parameters).toEqual({});
+ expect(emptyParamsAgent.versions).toHaveLength(4);
+ expect(emptyParamsAgent.model_parameters).toEqual({});
});
test('should not create new version for duplicate updates', async () => {
@@ -1548,15 +1428,15 @@ describe('Agent Methods', () => {
});
const updatedAgent = await updateAgent({ id: testAgentId }, testCase.update);
- expect(updatedAgent!.versions).toHaveLength(2); // No new version created
+ expect(updatedAgent.versions).toHaveLength(2); // No new version created
// Update with duplicate data should succeed but not create a new version
const duplicateUpdate = await updateAgent({ id: testAgentId }, testCase.duplicate);
- expect(duplicateUpdate!.versions).toHaveLength(2); // No new version created
+ expect(duplicateUpdate.versions).toHaveLength(2); // No new version created
const agent = await getAgent({ id: testAgentId });
- expect(agent!.versions).toHaveLength(2);
+ expect(agent.versions).toHaveLength(2);
}
});
@@ -1580,11 +1460,9 @@ describe('Agent Methods', () => {
{ updatingUserId: updatingUser.toString() },
);
- expect(updatedAgent!.versions).toHaveLength(2);
- expect((updatedAgent!.versions![1] as VersionEntry)?.updatedBy?.toString()).toBe(
- updatingUser.toString(),
- );
- expect(updatedAgent!.author.toString()).toBe(originalAuthor.toString());
+ expect(updatedAgent.versions).toHaveLength(2);
+ expect(updatedAgent.versions[1].updatedBy.toString()).toBe(updatingUser.toString());
+ expect(updatedAgent.author.toString()).toBe(originalAuthor.toString());
});
test('should include updatedBy even when the original author updates the agent', async () => {
@@ -1606,11 +1484,9 @@ describe('Agent Methods', () => {
{ updatingUserId: originalAuthor.toString() },
);
- expect(updatedAgent!.versions).toHaveLength(2);
- expect((updatedAgent!.versions![1] as VersionEntry)?.updatedBy?.toString()).toBe(
- originalAuthor.toString(),
- );
- expect(updatedAgent!.author.toString()).toBe(originalAuthor.toString());
+ expect(updatedAgent.versions).toHaveLength(2);
+ expect(updatedAgent.versions[1].updatedBy.toString()).toBe(originalAuthor.toString());
+ expect(updatedAgent.author.toString()).toBe(originalAuthor.toString());
});
test('should track multiple different users updating the same agent', async () => {
@@ -1657,21 +1533,20 @@ describe('Agent Methods', () => {
{ updatingUserId: user3.toString() },
);
- expect(finalAgent!.versions).toHaveLength(5);
- expect(finalAgent!.author.toString()).toBe(originalAuthor.toString());
+ expect(finalAgent.versions).toHaveLength(5);
+ expect(finalAgent.author.toString()).toBe(originalAuthor.toString());
// Check that each version has the correct updatedBy
- const versions = finalAgent!.versions! as VersionEntry[];
- expect(versions[0]?.updatedBy).toBeUndefined(); // Initial creation has no updatedBy
- expect(versions[1]?.updatedBy?.toString()).toBe(user1.toString());
- expect(versions[2]?.updatedBy?.toString()).toBe(originalAuthor.toString());
- expect(versions[3]?.updatedBy?.toString()).toBe(user2.toString());
- expect(versions[4]?.updatedBy?.toString()).toBe(user3.toString());
+ expect(finalAgent.versions[0].updatedBy).toBeUndefined(); // Initial creation has no updatedBy
+ expect(finalAgent.versions[1].updatedBy.toString()).toBe(user1.toString());
+ expect(finalAgent.versions[2].updatedBy.toString()).toBe(originalAuthor.toString());
+ expect(finalAgent.versions[3].updatedBy.toString()).toBe(user2.toString());
+ expect(finalAgent.versions[4].updatedBy.toString()).toBe(user3.toString());
// Verify the final state
- expect(finalAgent!.name).toBe('Updated by User 2');
- expect(finalAgent!.description).toBe('Final update by User 3');
- expect(finalAgent!.model).toBe('new-model');
+ expect(finalAgent.name).toBe('Updated by User 2');
+ expect(finalAgent.description).toBe('Final update by User 3');
+ expect(finalAgent.model).toBe('new-model');
});
test('should preserve original author during agent restoration', async () => {
@@ -1694,6 +1569,7 @@ describe('Agent Methods', () => {
{ updatingUserId: updatingUser.toString() },
);
+ const { revertAgentVersion } = require('./Agent');
const revertedAgent = await revertAgentVersion({ id: agentId }, 0);
expect(revertedAgent.author.toString()).toBe(originalAuthor.toString());
@@ -1724,7 +1600,7 @@ describe('Agent Methods', () => {
{ updatingUserId: authorId.toString(), forceVersion: true },
);
- expect(firstUpdate!.versions).toHaveLength(2);
+ expect(firstUpdate.versions).toHaveLength(2);
// Second update with same data but forceVersion should still create a version
const secondUpdate = await updateAgent(
@@ -1733,7 +1609,7 @@ describe('Agent Methods', () => {
{ updatingUserId: authorId.toString(), forceVersion: true },
);
- expect(secondUpdate!.versions).toHaveLength(3);
+ expect(secondUpdate.versions).toHaveLength(3);
// Update without forceVersion and no changes should not create a version
const duplicateUpdate = await updateAgent(
@@ -1742,7 +1618,7 @@ describe('Agent Methods', () => {
{ updatingUserId: authorId.toString(), forceVersion: false },
);
- expect(duplicateUpdate!.versions).toHaveLength(3); // No new version created
+ expect(duplicateUpdate.versions).toHaveLength(3); // No new version created
});
test('should handle isDuplicateVersion with arrays containing null/undefined values', async () => {
@@ -1761,8 +1637,8 @@ describe('Agent Methods', () => {
// Update with same array but different null/undefined arrangement
const updatedAgent = await updateAgent({ id: agentId }, { tools: ['tool1', 'tool2'] });
- expect(updatedAgent!.versions).toHaveLength(2);
- expect(updatedAgent!.tools).toEqual(['tool1', 'tool2']);
+ expect(updatedAgent.versions).toHaveLength(2);
+ expect(updatedAgent.tools).toEqual(['tool1', 'tool2']);
});
test('should handle isDuplicateVersion with empty objects in tool_kwargs', async () => {
@@ -1795,7 +1671,7 @@ describe('Agent Methods', () => {
);
// Should create new version as order matters for arrays
- expect(updatedAgent!.versions).toHaveLength(2);
+ expect(updatedAgent.versions).toHaveLength(2);
});
test('should handle isDuplicateVersion with mixed primitive and object arrays', async () => {
@@ -1818,7 +1694,7 @@ describe('Agent Methods', () => {
);
// Should create new version as types differ
- expect(updatedAgent!.versions).toHaveLength(2);
+ expect(updatedAgent.versions).toHaveLength(2);
});
test('should handle isDuplicateVersion with deeply nested objects', async () => {
@@ -1862,12 +1738,13 @@ describe('Agent Methods', () => {
// Since we're updating back to the same model_parameters but with a different description,
// it should create a new version
const agent = await getAgent({ id: agentId });
- expect(agent!.versions).toHaveLength(3);
+ expect(agent.versions).toHaveLength(3);
});
test('should handle version comparison with special field types', async () => {
const agentId = `agent_${uuidv4()}`;
const authorId = new mongoose.Types.ObjectId();
+ const projectId = new mongoose.Types.ObjectId();
await createAgent({
id: agentId,
@@ -1875,13 +1752,14 @@ describe('Agent Methods', () => {
provider: 'test',
model: 'test-model',
author: authorId,
+ projectIds: [projectId],
model_parameters: { temperature: 0.7 },
});
// Update with a real field change first
const firstUpdate = await updateAgent({ id: agentId }, { description: 'New description' });
- expect(firstUpdate!.versions).toHaveLength(2);
+ expect(firstUpdate.versions).toHaveLength(2);
// Update with model parameters change
const secondUpdate = await updateAgent(
@@ -1889,7 +1767,7 @@ describe('Agent Methods', () => {
{ model_parameters: { temperature: 0.8 } },
);
- expect(secondUpdate!.versions).toHaveLength(3);
+ expect(secondUpdate.versions).toHaveLength(3);
});
test('should detect changes in support_contact fields', async () => {
@@ -1920,9 +1798,9 @@ describe('Agent Methods', () => {
},
);
- expect(firstUpdate!.versions).toHaveLength(2);
- expect(firstUpdate!.support_contact?.name).toBe('Updated Support');
- expect(firstUpdate!.support_contact?.email).toBe('initial@support.com');
+ expect(firstUpdate.versions).toHaveLength(2);
+ expect(firstUpdate.support_contact.name).toBe('Updated Support');
+ expect(firstUpdate.support_contact.email).toBe('initial@support.com');
// Update support_contact email only
const secondUpdate = await updateAgent(
@@ -1935,8 +1813,8 @@ describe('Agent Methods', () => {
},
);
- expect(secondUpdate!.versions).toHaveLength(3);
- expect(secondUpdate!.support_contact?.email).toBe('updated@support.com');
+ expect(secondUpdate.versions).toHaveLength(3);
+ expect(secondUpdate.support_contact.email).toBe('updated@support.com');
// Try to update with same support_contact - should be detected as duplicate but return successfully
const duplicateUpdate = await updateAgent(
@@ -1950,9 +1828,9 @@ describe('Agent Methods', () => {
);
// Should not create a new version
- expect(duplicateUpdate?.versions).toHaveLength(3);
- expect((duplicateUpdate as IAgent & { version?: number })?.version).toBe(3);
- expect(duplicateUpdate?.support_contact?.email).toBe('updated@support.com');
+ expect(duplicateUpdate.versions).toHaveLength(3);
+ expect(duplicateUpdate.version).toBe(3);
+ expect(duplicateUpdate.support_contact.email).toBe('updated@support.com');
});
test('should handle support_contact from empty to populated', async () => {
@@ -1982,9 +1860,9 @@ describe('Agent Methods', () => {
},
);
- expect(updated?.versions).toHaveLength(2);
- expect(updated?.support_contact?.name).toBe('New Support Team');
- expect(updated?.support_contact?.email).toBe('support@example.com');
+ expect(updated.versions).toHaveLength(2);
+ expect(updated.support_contact.name).toBe('New Support Team');
+ expect(updated.support_contact.email).toBe('support@example.com');
});
test('should handle support_contact edge cases in isDuplicateVersion', async () => {
@@ -2012,8 +1890,8 @@ describe('Agent Methods', () => {
},
);
- expect(emptyUpdate?.versions).toHaveLength(2);
- expect(emptyUpdate?.support_contact).toEqual({});
+ expect(emptyUpdate.versions).toHaveLength(2);
+ expect(emptyUpdate.support_contact).toEqual({});
// Update back to populated support_contact
const repopulated = await updateAgent(
@@ -2026,16 +1904,16 @@ describe('Agent Methods', () => {
},
);
- expect(repopulated?.versions).toHaveLength(3);
+ expect(repopulated.versions).toHaveLength(3);
// Verify all versions have correct support_contact
const finalAgent = await getAgent({ id: agentId });
- expect(finalAgent!.versions![0]?.support_contact).toEqual({
+ expect(finalAgent.versions[0].support_contact).toEqual({
name: 'Support',
email: 'support@test.com',
});
- expect(finalAgent!.versions![1]?.support_contact).toEqual({});
- expect(finalAgent!.versions![2]?.support_contact).toEqual({
+ expect(finalAgent.versions[1].support_contact).toEqual({});
+ expect(finalAgent.versions[2].support_contact).toEqual({
name: 'Support',
email: 'support@test.com',
});
@@ -2082,22 +1960,22 @@ describe('Agent Methods', () => {
const finalAgent = await getAgent({ id: agentId });
// Verify version history
- expect(finalAgent!.versions).toHaveLength(3);
- expect(finalAgent!.versions![0]?.support_contact).toEqual({
+ expect(finalAgent.versions).toHaveLength(3);
+ expect(finalAgent.versions[0].support_contact).toEqual({
name: 'Initial Contact',
email: 'initial@test.com',
});
- expect(finalAgent!.versions![1]?.support_contact).toEqual({
+ expect(finalAgent.versions[1].support_contact).toEqual({
name: 'Second Contact',
email: 'second@test.com',
});
- expect(finalAgent!.versions![2]?.support_contact).toEqual({
+ expect(finalAgent.versions[2].support_contact).toEqual({
name: 'Third Contact',
email: 'third@test.com',
});
// Current state should match last version
- expect(finalAgent!.support_contact).toEqual({
+ expect(finalAgent.support_contact).toEqual({
name: 'Third Contact',
email: 'third@test.com',
});
@@ -2132,9 +2010,9 @@ describe('Agent Methods', () => {
},
);
- expect(updated?.versions).toHaveLength(2);
- expect(updated?.support_contact?.name).toBe('New Name');
- expect(updated?.support_contact?.email).toBe('');
+ expect(updated.versions).toHaveLength(2);
+ expect(updated.support_contact.name).toBe('New Name');
+ expect(updated.support_contact.email).toBe('');
// Verify isDuplicateVersion works with partial changes - should return successfully without creating new version
const duplicateUpdate = await updateAgent(
@@ -2148,10 +2026,10 @@ describe('Agent Methods', () => {
);
// Should not create a new version since content is the same
- expect(duplicateUpdate?.versions).toHaveLength(2);
- expect((duplicateUpdate as IAgent & { version?: number })?.version).toBe(2);
- expect(duplicateUpdate?.support_contact?.name).toBe('New Name');
- expect(duplicateUpdate?.support_contact?.email).toBe('');
+ expect(duplicateUpdate.versions).toHaveLength(2);
+ expect(duplicateUpdate.version).toBe(2);
+ expect(duplicateUpdate.support_contact.name).toBe('New Name');
+ expect(duplicateUpdate.support_contact.email).toBe('');
});
// Edge Cases
@@ -2174,7 +2052,7 @@ describe('Agent Methods', () => {
])('addAgentResourceFile with $name', ({ needsAgent, params, shouldResolve, error }) => {
test(`should ${shouldResolve ? 'resolve' : 'reject'}`, async () => {
const agent = needsAgent ? await createBasicAgent() : null;
- const agent_id = needsAgent ? agent!.id : `agent_${uuidv4()}`;
+ const agent_id = needsAgent ? agent.id : `agent_${uuidv4()}`;
if (shouldResolve) {
await expect(addAgentResourceFile({ agent_id, ...params })).resolves.toBeDefined();
@@ -2207,7 +2085,7 @@ describe('Agent Methods', () => {
])('removeAgentResourceFiles with $name', ({ files, needsAgent, shouldResolve, error }) => {
test(`should ${shouldResolve ? 'resolve' : 'reject'}`, async () => {
const agent = needsAgent ? await createBasicAgent() : null;
- const agent_id = needsAgent ? agent!.id : `agent_${uuidv4()}`;
+ const agent_id = needsAgent ? agent.id : `agent_${uuidv4()}`;
if (shouldResolve) {
const result = await removeAgentResourceFiles({ agent_id, files });
@@ -2239,8 +2117,8 @@ describe('Agent Methods', () => {
}
const agent = await getAgent({ id: agentId });
- expect(agent!.versions).toHaveLength(21);
- expect(agent!.description).toBe('Version 19');
+ expect(agent.versions).toHaveLength(21);
+ expect(agent.description).toBe('Version 19');
});
test('should handle revertAgentVersion with invalid version index', async () => {
@@ -2281,13 +2159,27 @@ describe('Agent Methods', () => {
const updatedAgent = await updateAgent({ id: agentId }, {});
expect(updatedAgent).toBeDefined();
- expect(updatedAgent!.name).toBe('Test Agent');
- expect(updatedAgent!.versions).toHaveLength(1);
+ expect(updatedAgent.name).toBe('Test Agent');
+ expect(updatedAgent.versions).toHaveLength(1);
});
});
});
describe('Action Metadata and Hash Generation', () => {
+ let mongoServer;
+
+ beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
+ await mongoose.connect(mongoUri);
+ }, 20000);
+
+ afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ });
+
beforeEach(async () => {
await Agent.deleteMany({});
});
@@ -2455,9 +2347,332 @@ describe('Agent Methods', () => {
});
});
- /* Load Agent Functionality tests moved to api/models/Agent.spec.js */
+ describe('Load Agent Functionality', () => {
+ let mongoServer;
+
+ beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
+ await mongoose.connect(mongoUri);
+ }, 20000);
+
+ afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ });
+
+ beforeEach(async () => {
+ await Agent.deleteMany({});
+ });
+
+ test('should return null when agent_id is not provided', async () => {
+ const mockReq = { user: { id: 'user123' } };
+ const result = await loadAgent({
+ req: mockReq,
+ agent_id: null,
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-4' },
+ });
+
+ expect(result).toBeNull();
+ });
+
+ test('should return null when agent_id is empty string', async () => {
+ const mockReq = { user: { id: 'user123' } };
+ const result = await loadAgent({
+ req: mockReq,
+ agent_id: '',
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-4' },
+ });
+
+ expect(result).toBeNull();
+ });
+
+ test('should test ephemeral agent loading logic', async () => {
+ const { EPHEMERAL_AGENT_ID } = require('librechat-data-provider').Constants;
+
+ getCachedTools.mockResolvedValue({
+ tool1_mcp_server1: {},
+ tool2_mcp_server2: {},
+ another_tool: {},
+ });
+
+ // Mock getMCPServerTools to return tools for each server
+ getMCPServerTools.mockImplementation(async (_userId, server) => {
+ if (server === 'server1') {
+ return { tool1_mcp_server1: {} };
+ } else if (server === 'server2') {
+ return { tool2_mcp_server2: {} };
+ }
+ return null;
+ });
+
+ const mockReq = {
+ user: { id: 'user123' },
+ body: {
+ promptPrefix: 'Test instructions',
+ ephemeralAgent: {
+ execute_code: true,
+ web_search: true,
+ mcp: ['server1', 'server2'],
+ },
+ },
+ };
+
+ const result = await loadAgent({
+ req: mockReq,
+ agent_id: EPHEMERAL_AGENT_ID,
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-4', temperature: 0.7 },
+ });
+
+ if (result) {
+ // Ephemeral agent ID is encoded with endpoint and model
+ expect(result.id).toBe('openai__gpt-4');
+ expect(result.instructions).toBe('Test instructions');
+ expect(result.provider).toBe('openai');
+ expect(result.model).toBe('gpt-4');
+ expect(result.model_parameters.temperature).toBe(0.7);
+ expect(result.tools).toContain('execute_code');
+ expect(result.tools).toContain('web_search');
+ expect(result.tools).toContain('tool1_mcp_server1');
+ expect(result.tools).toContain('tool2_mcp_server2');
+ } else {
+ expect(result).toBeNull();
+ }
+ });
+
+ test('should return null for non-existent agent', async () => {
+ const mockReq = { user: { id: 'user123' } };
+ const result = await loadAgent({
+ req: mockReq,
+ agent_id: 'agent_non_existent',
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-4' },
+ });
+
+ expect(result).toBeNull();
+ });
+
+ test('should load agent when user is the author', async () => {
+ const userId = new mongoose.Types.ObjectId();
+ const agentId = `agent_${uuidv4()}`;
+
+ await createAgent({
+ id: agentId,
+ name: 'Test Agent',
+ provider: 'openai',
+ model: 'gpt-4',
+ author: userId,
+ description: 'Test description',
+ tools: ['web_search'],
+ });
+
+ const mockReq = { user: { id: userId.toString() } };
+ const result = await loadAgent({
+ req: mockReq,
+ agent_id: agentId,
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-4' },
+ });
+
+ expect(result).toBeDefined();
+ expect(result.id).toBe(agentId);
+ expect(result.name).toBe('Test Agent');
+ expect(result.author.toString()).toBe(userId.toString());
+ expect(result.version).toBe(1);
+ });
+
+ test('should return agent even when user is not author (permissions checked at route level)', async () => {
+ const authorId = new mongoose.Types.ObjectId();
+ const userId = new mongoose.Types.ObjectId();
+ const agentId = `agent_${uuidv4()}`;
+
+ await createAgent({
+ id: agentId,
+ name: 'Test Agent',
+ provider: 'openai',
+ model: 'gpt-4',
+ author: authorId,
+ });
+
+ const mockReq = { user: { id: userId.toString() } };
+ const result = await loadAgent({
+ req: mockReq,
+ agent_id: agentId,
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-4' },
+ });
+
+ // With the new permission system, loadAgent returns the agent regardless of permissions
+ // Permission checks are handled at the route level via middleware
+ expect(result).toBeTruthy();
+ expect(result.id).toBe(agentId);
+ expect(result.name).toBe('Test Agent');
+ });
+
+ test('should handle ephemeral agent with no MCP servers', async () => {
+ const { EPHEMERAL_AGENT_ID } = require('librechat-data-provider').Constants;
+
+ getCachedTools.mockResolvedValue({});
+
+ const mockReq = {
+ user: { id: 'user123' },
+ body: {
+ promptPrefix: 'Simple instructions',
+ ephemeralAgent: {
+ execute_code: false,
+ web_search: false,
+ mcp: [],
+ },
+ },
+ };
+
+ const result = await loadAgent({
+ req: mockReq,
+ agent_id: EPHEMERAL_AGENT_ID,
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-3.5-turbo' },
+ });
+
+ if (result) {
+ expect(result.tools).toEqual([]);
+ expect(result.instructions).toBe('Simple instructions');
+ } else {
+ expect(result).toBeFalsy();
+ }
+ });
+
+ test('should handle ephemeral agent with undefined ephemeralAgent in body', async () => {
+ const { EPHEMERAL_AGENT_ID } = require('librechat-data-provider').Constants;
+
+ getCachedTools.mockResolvedValue({});
+
+ const mockReq = {
+ user: { id: 'user123' },
+ body: {
+ promptPrefix: 'Basic instructions',
+ },
+ };
+
+ const result = await loadAgent({
+ req: mockReq,
+ agent_id: EPHEMERAL_AGENT_ID,
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-4' },
+ });
+
+ if (result) {
+ expect(result.tools).toEqual([]);
+ } else {
+ expect(result).toBeFalsy();
+ }
+ });
+
+ describe('Edge Cases', () => {
+ test('should handle loadAgent with malformed req object', async () => {
+ const result = await loadAgent({
+ req: null,
+ agent_id: 'agent_test',
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-4' },
+ });
+
+ expect(result).toBeNull();
+ });
+
+ test('should handle ephemeral agent with extremely large tool list', async () => {
+ const { EPHEMERAL_AGENT_ID } = require('librechat-data-provider').Constants;
+
+ const largeToolList = Array.from({ length: 100 }, (_, i) => `tool_${i}_mcp_server1`);
+ const availableTools = largeToolList.reduce((acc, tool) => {
+ acc[tool] = {};
+ return acc;
+ }, {});
+
+ getCachedTools.mockResolvedValue(availableTools);
+
+ // Mock getMCPServerTools to return all tools for server1
+ getMCPServerTools.mockImplementation(async (_userId, server) => {
+ if (server === 'server1') {
+ return availableTools; // All 100 tools belong to server1
+ }
+ return null;
+ });
+
+ const mockReq = {
+ user: { id: 'user123' },
+ body: {
+ promptPrefix: 'Test',
+ ephemeralAgent: {
+ execute_code: true,
+ web_search: true,
+ mcp: ['server1'],
+ },
+ },
+ };
+
+ const result = await loadAgent({
+ req: mockReq,
+ agent_id: EPHEMERAL_AGENT_ID,
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-4' },
+ });
+
+ if (result) {
+ expect(result.tools.length).toBeGreaterThan(100);
+ }
+ });
+
+ test('should return agent from different project (permissions checked at route level)', async () => {
+ const authorId = new mongoose.Types.ObjectId();
+ const userId = new mongoose.Types.ObjectId();
+ const agentId = `agent_${uuidv4()}`;
+ const projectId = new mongoose.Types.ObjectId();
+
+ await createAgent({
+ id: agentId,
+ name: 'Project Agent',
+ provider: 'openai',
+ model: 'gpt-4',
+ author: authorId,
+ projectIds: [projectId],
+ });
+
+ const mockReq = { user: { id: userId.toString() } };
+ const result = await loadAgent({
+ req: mockReq,
+ agent_id: agentId,
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-4' },
+ });
+
+ // With the new permission system, loadAgent returns the agent regardless of permissions
+ // Permission checks are handled at the route level via middleware
+ expect(result).toBeTruthy();
+ expect(result.id).toBe(agentId);
+ expect(result.name).toBe('Project Agent');
+ });
+ });
+ });
describe('Agent Edge Cases and Error Handling', () => {
+ let mongoServer;
+
+ beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
+ await mongoose.connect(mongoUri);
+ }, 20000);
+
+ afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ });
+
beforeEach(async () => {
await Agent.deleteMany({});
});
@@ -2476,13 +2691,14 @@ describe('Agent Methods', () => {
expect(agent).toBeDefined();
expect(agent.id).toBe(agentId);
expect(agent.versions).toHaveLength(1);
- expect(agent.versions![0]?.provider).toBe('test');
- expect(agent.versions![0]?.model).toBe('test-model');
+ expect(agent.versions[0].provider).toBe('test');
+ expect(agent.versions[0].model).toBe('test-model');
});
test('should handle agent creation with all optional fields', async () => {
const agentId = `agent_${uuidv4()}`;
const authorId = new mongoose.Types.ObjectId();
+ const projectId = new mongoose.Types.ObjectId();
const agent = await createAgent({
id: agentId,
@@ -2495,7 +2711,9 @@ describe('Agent Methods', () => {
tools: ['tool1', 'tool2'],
actions: ['action1', 'action2'],
model_parameters: { temperature: 0.8, max_tokens: 1000 },
+ projectIds: [projectId],
avatar: 'https://example.com/avatar.png',
+ isCollaborative: true,
tool_resources: {
file_search: { file_ids: ['file1', 'file2'] },
},
@@ -2507,10 +2725,12 @@ describe('Agent Methods', () => {
expect(agent.instructions).toBe('Complex instructions');
expect(agent.tools).toEqual(['tool1', 'tool2']);
expect(agent.actions).toEqual(['action1', 'action2']);
- expect(agent.model_parameters?.temperature).toBe(0.8);
- expect(agent.model_parameters?.max_tokens).toBe(1000);
+ expect(agent.model_parameters.temperature).toBe(0.8);
+ expect(agent.model_parameters.max_tokens).toBe(1000);
+ expect(agent.projectIds.map((id) => id.toString())).toContain(projectId.toString());
expect(agent.avatar).toBe('https://example.com/avatar.png');
- expect(agent.tool_resources?.file_search?.file_ids).toEqual(['file1', 'file2']);
+ expect(agent.isCollaborative).toBe(true);
+ expect(agent.tool_resources.file_search.file_ids).toEqual(['file1', 'file2']);
});
test('should handle updateAgent with empty update object', async () => {
@@ -2528,8 +2748,8 @@ describe('Agent Methods', () => {
const updatedAgent = await updateAgent({ id: agentId }, {});
expect(updatedAgent).toBeDefined();
- expect(updatedAgent!.name).toBe('Test Agent');
- expect(updatedAgent!.versions).toHaveLength(1); // No new version should be created
+ expect(updatedAgent.name).toBe('Test Agent');
+ expect(updatedAgent.versions).toHaveLength(1); // No new version should be created
});
test('should handle concurrent updates to different agents', async () => {
@@ -2559,10 +2779,10 @@ describe('Agent Methods', () => {
updateAgent({ id: agent2Id }, { description: 'Updated Agent 2' }),
]);
- expect(updated1?.description).toBe('Updated Agent 1');
- expect(updated2?.description).toBe('Updated Agent 2');
- expect(updated1?.versions).toHaveLength(2);
- expect(updated2?.versions).toHaveLength(2);
+ expect(updated1.description).toBe('Updated Agent 1');
+ expect(updated2.description).toBe('Updated Agent 2');
+ expect(updated1.versions).toHaveLength(2);
+ expect(updated2.versions).toHaveLength(2);
});
test('should handle agent deletion with non-existent ID', async () => {
@@ -2594,10 +2814,10 @@ describe('Agent Methods', () => {
},
);
- expect(updatedAgent!.name).toBe('Updated Name');
- expect(updatedAgent!.tools).toContain('tool1');
- expect(updatedAgent!.tools).toContain('tool2');
- expect(updatedAgent!.versions).toHaveLength(2);
+ expect(updatedAgent.name).toBe('Updated Name');
+ expect(updatedAgent.tools).toContain('tool1');
+ expect(updatedAgent.tools).toContain('tool2');
+ expect(updatedAgent.versions).toHaveLength(2);
});
test('should handle revertAgentVersion with invalid version index', async () => {
@@ -2624,9 +2844,11 @@ describe('Agent Methods', () => {
test('should handle addAgentResourceFile with non-existent agent', async () => {
const nonExistentId = `agent_${uuidv4()}`;
+ const mockReq = { user: { id: 'user123' } };
await expect(
addAgentResourceFile({
+ req: mockReq,
agent_id: nonExistentId,
tool_resource: 'file_search',
file_id: 'file123',
@@ -2667,8 +2889,8 @@ describe('Agent Methods', () => {
},
);
- expect(firstUpdate!.tools).toContain('tool1');
- expect(firstUpdate!.tools).toContain('tool2');
+ expect(firstUpdate.tools).toContain('tool1');
+ expect(firstUpdate.tools).toContain('tool2');
// Second update with direct field update and $addToSet
const secondUpdate = await updateAgent(
@@ -2680,13 +2902,13 @@ describe('Agent Methods', () => {
},
);
- expect(secondUpdate!.name).toBe('Updated Agent');
- expect(secondUpdate!.model_parameters?.temperature).toBe(0.8);
- expect(secondUpdate!.model_parameters?.max_tokens).toBe(500);
- expect(secondUpdate!.tools).toContain('tool1');
- expect(secondUpdate!.tools).toContain('tool2');
- expect(secondUpdate!.tools).toContain('tool3');
- expect(secondUpdate!.versions).toHaveLength(3);
+ expect(secondUpdate.name).toBe('Updated Agent');
+ expect(secondUpdate.model_parameters.temperature).toBe(0.8);
+ expect(secondUpdate.model_parameters.max_tokens).toBe(500);
+ expect(secondUpdate.tools).toContain('tool1');
+ expect(secondUpdate.tools).toContain('tool2');
+ expect(secondUpdate.tools).toContain('tool3');
+ expect(secondUpdate.versions).toHaveLength(3);
});
test('should preserve version order in versions array', async () => {
@@ -2705,12 +2927,27 @@ describe('Agent Methods', () => {
await updateAgent({ id: agentId }, { name: 'Version 3' });
const finalAgent = await updateAgent({ id: agentId }, { name: 'Version 4' });
- expect(finalAgent!.versions).toHaveLength(4);
- expect(finalAgent!.versions![0]?.name).toBe('Version 1');
- expect(finalAgent!.versions![1]?.name).toBe('Version 2');
- expect(finalAgent!.versions![2]?.name).toBe('Version 3');
- expect(finalAgent!.versions![3]?.name).toBe('Version 4');
- expect(finalAgent!.name).toBe('Version 4');
+ expect(finalAgent.versions).toHaveLength(4);
+ expect(finalAgent.versions[0].name).toBe('Version 1');
+ expect(finalAgent.versions[1].name).toBe('Version 2');
+ expect(finalAgent.versions[2].name).toBe('Version 3');
+ expect(finalAgent.versions[3].name).toBe('Version 4');
+ expect(finalAgent.name).toBe('Version 4');
+ });
+
+ test('should handle updateAgentProjects error scenarios', async () => {
+ const nonExistentId = `agent_${uuidv4()}`;
+ const userId = new mongoose.Types.ObjectId();
+ const projectId = new mongoose.Types.ObjectId();
+
+ // Test with non-existent agent
+ const result = await updateAgentProjects({
+ user: { id: userId.toString() },
+ agentId: nonExistentId,
+ projectIds: [projectId.toString()],
+ });
+
+ expect(result).toBeNull();
});
test('should handle revertAgentVersion properly', async () => {
@@ -2759,13 +2996,15 @@ describe('Agent Methods', () => {
);
expect(updatedAgent).toBeDefined();
- expect(updatedAgent!.description).toBe('Updated description');
- expect(updatedAgent!.versions).toHaveLength(2);
+ expect(updatedAgent.description).toBe('Updated description');
+ expect(updatedAgent.versions).toHaveLength(2);
});
test('should handle updateAgent with combined MongoDB operators', async () => {
const agentId = `agent_${uuidv4()}`;
const authorId = new mongoose.Types.ObjectId();
+ const projectId1 = new mongoose.Types.ObjectId();
+ const projectId2 = new mongoose.Types.ObjectId();
await createAgent({
id: agentId,
@@ -2774,6 +3013,7 @@ describe('Agent Methods', () => {
model: 'test-model',
author: authorId,
tools: ['tool1'],
+ projectIds: [projectId1],
});
// Use multiple operators in single update - but avoid conflicting operations on same field
@@ -2782,14 +3022,26 @@ describe('Agent Methods', () => {
{
name: 'Updated Name',
$push: { tools: 'tool2' },
+ $addToSet: { projectIds: projectId2 },
+ },
+ );
+
+ const finalAgent = await updateAgent(
+ { id: agentId },
+ {
+ $pull: { projectIds: projectId1 },
},
);
expect(updatedAgent).toBeDefined();
- expect(updatedAgent!.name).toBe('Updated Name');
- expect(updatedAgent!.tools).toContain('tool1');
- expect(updatedAgent!.tools).toContain('tool2');
- expect(updatedAgent!.versions).toHaveLength(2);
+ expect(updatedAgent.name).toBe('Updated Name');
+ expect(updatedAgent.tools).toContain('tool1');
+ expect(updatedAgent.tools).toContain('tool2');
+ expect(updatedAgent.projectIds.map((id) => id.toString())).toContain(projectId2.toString());
+
+ expect(finalAgent).toBeDefined();
+ expect(finalAgent.projectIds.map((id) => id.toString())).not.toContain(projectId1.toString());
+ expect(finalAgent.versions).toHaveLength(3);
});
test('should handle updateAgent when agent does not exist', async () => {
@@ -2870,6 +3122,54 @@ describe('Agent Methods', () => {
Agent.findOneAndUpdate = originalFindOneAndUpdate;
});
+ test('should handle loadEphemeralAgent with malformed MCP tool names', async () => {
+ const { EPHEMERAL_AGENT_ID } = require('librechat-data-provider').Constants;
+
+ getCachedTools.mockResolvedValue({
+ malformed_tool_name: {}, // No mcp delimiter
+ tool__server1: {}, // Wrong delimiter
+ tool_mcp_server1: {}, // Correct format
+ tool_mcp_server2: {}, // Different server
+ });
+
+ // Mock getMCPServerTools to return only tools matching the server
+ getMCPServerTools.mockImplementation(async (_userId, server) => {
+ if (server === 'server1') {
+ // Only return tool that correctly matches server1 format
+ return { tool_mcp_server1: {} };
+ } else if (server === 'server2') {
+ return { tool_mcp_server2: {} };
+ }
+ return null;
+ });
+
+ const mockReq = {
+ user: { id: 'user123' },
+ body: {
+ promptPrefix: 'Test instructions',
+ ephemeralAgent: {
+ execute_code: false,
+ web_search: false,
+ mcp: ['server1'],
+ },
+ },
+ };
+
+ const result = await loadAgent({
+ req: mockReq,
+ agent_id: EPHEMERAL_AGENT_ID,
+ endpoint: 'openai',
+ model_parameters: { model: 'gpt-4' },
+ });
+
+ if (result) {
+ expect(result.tools).toEqual(['tool_mcp_server1']);
+ expect(result.tools).not.toContain('malformed_tool_name');
+ expect(result.tools).not.toContain('tool__server1');
+ expect(result.tools).not.toContain('tool_mcp_server2');
+ }
+ });
+
test('should handle addAgentResourceFile when array initialization fails', async () => {
const agentId = `agent_${uuidv4()}`;
const authorId = new mongoose.Types.ObjectId();
@@ -2890,10 +3190,7 @@ describe('Agent Methods', () => {
updateOneCalled = true;
return Promise.reject(new Error('Database error'));
}
- return originalUpdateOne.apply(
- Agent,
- args as [update: UpdateQuery | UpdateWithAggregationPipeline],
- );
+ return originalUpdateOne.apply(Agent, args);
});
try {
@@ -2905,8 +3202,8 @@ describe('Agent Methods', () => {
expect(result).toBeDefined();
expect(result.tools).toContain('new_tool');
- } catch (error: unknown) {
- expect((error as Error).message).toBe('Database error');
+ } catch (error) {
+ expect(error.message).toBe('Database error');
}
Agent.updateOne = originalUpdateOne;
@@ -2914,6 +3211,20 @@ describe('Agent Methods', () => {
});
describe('Agent IDs Field in Version Detection', () => {
+ let mongoServer;
+
+ beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
+ await mongoose.connect(mongoUri);
+ }, 20000);
+
+ afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ });
+
beforeEach(async () => {
await Agent.deleteMany({});
});
@@ -2940,8 +3251,8 @@ describe('Agent Methods', () => {
);
// Since agent_ids is no longer excluded, this should create a new version
- expect(updated?.versions).toHaveLength(2);
- expect(updated?.agent_ids).toEqual(['agent1', 'agent2', 'agent3']);
+ expect(updated.versions).toHaveLength(2);
+ expect(updated.agent_ids).toEqual(['agent1', 'agent2', 'agent3']);
});
test('should detect duplicate version if agent_ids is updated to same value', async () => {
@@ -2961,14 +3272,14 @@ describe('Agent Methods', () => {
{ id: agentId },
{ agent_ids: ['agent1', 'agent2', 'agent3'] },
);
- expect(updatedAgent!.versions).toHaveLength(2);
+ expect(updatedAgent.versions).toHaveLength(2);
// Update with same agent_ids should succeed but not create a new version
const duplicateUpdate = await updateAgent(
{ id: agentId },
{ agent_ids: ['agent1', 'agent2', 'agent3'] },
);
- expect(duplicateUpdate?.versions).toHaveLength(2); // No new version created
+ expect(duplicateUpdate.versions).toHaveLength(2); // No new version created
});
test('should handle agent_ids field alongside other fields', async () => {
@@ -2993,15 +3304,74 @@ describe('Agent Methods', () => {
},
);
- expect(updated?.versions).toHaveLength(2);
- expect(updated?.agent_ids).toEqual(['agent1', 'agent2']);
- expect(updated?.description).toBe('Updated description');
+ expect(updated.versions).toHaveLength(2);
+ expect(updated.agent_ids).toEqual(['agent1', 'agent2']);
+ expect(updated.description).toBe('Updated description');
const updated2 = await updateAgent({ id: agentId }, { description: 'Another description' });
- expect(updated2?.versions).toHaveLength(3);
- expect(updated2?.agent_ids).toEqual(['agent1', 'agent2']);
- expect(updated2?.description).toBe('Another description');
+ expect(updated2.versions).toHaveLength(3);
+ expect(updated2.agent_ids).toEqual(['agent1', 'agent2']);
+ expect(updated2.description).toBe('Another description');
+ });
+
+ test('should skip version creation when skipVersioning option is used', async () => {
+ const agentId = `agent_${uuidv4()}`;
+ const authorId = new mongoose.Types.ObjectId();
+ const projectId1 = new mongoose.Types.ObjectId();
+ const projectId2 = new mongoose.Types.ObjectId();
+
+ // Create agent with initial projectIds
+ await createAgent({
+ id: agentId,
+ name: 'Test Agent',
+ provider: 'test',
+ model: 'test-model',
+ author: authorId,
+ projectIds: [projectId1],
+ });
+
+ // Share agent using updateAgentProjects (which uses skipVersioning)
+ const shared = await updateAgentProjects({
+ user: { id: authorId.toString() }, // Use the same author ID
+ agentId: agentId,
+ projectIds: [projectId2.toString()],
+ });
+
+ // Should NOT create a new version due to skipVersioning
+ expect(shared.versions).toHaveLength(1);
+ expect(shared.projectIds.map((id) => id.toString())).toContain(projectId1.toString());
+ expect(shared.projectIds.map((id) => id.toString())).toContain(projectId2.toString());
+
+ // Unshare agent using updateAgentProjects
+ const unshared = await updateAgentProjects({
+ user: { id: authorId.toString() },
+ agentId: agentId,
+ removeProjectIds: [projectId1.toString()],
+ });
+
+ // Still should NOT create a new version
+ expect(unshared.versions).toHaveLength(1);
+ expect(unshared.projectIds.map((id) => id.toString())).not.toContain(projectId1.toString());
+ expect(unshared.projectIds.map((id) => id.toString())).toContain(projectId2.toString());
+
+ // Regular update without skipVersioning should create a version
+ const regularUpdate = await updateAgent(
+ { id: agentId },
+ { description: 'Updated description' },
+ );
+
+ expect(regularUpdate.versions).toHaveLength(2);
+ expect(regularUpdate.description).toBe('Updated description');
+
+ // Direct updateAgent with MongoDB operators should still create versions
+ const directUpdate = await updateAgent(
+ { id: agentId },
+ { $addToSet: { projectIds: { $each: [projectId1] } } },
+ );
+
+ expect(directUpdate.versions).toHaveLength(3);
+ expect(directUpdate.projectIds.length).toBe(2);
});
test('should preserve agent_ids in version history', async () => {
@@ -3023,11 +3393,11 @@ describe('Agent Methods', () => {
const finalAgent = await getAgent({ id: agentId });
- expect(finalAgent!.versions).toHaveLength(3);
- expect(finalAgent!.versions![0]?.agent_ids).toEqual(['agent1']);
- expect(finalAgent!.versions![1]?.agent_ids).toEqual(['agent1', 'agent2']);
- expect(finalAgent!.versions![2]?.agent_ids).toEqual(['agent3']);
- expect(finalAgent!.agent_ids).toEqual(['agent3']);
+ expect(finalAgent.versions).toHaveLength(3);
+ expect(finalAgent.versions[0].agent_ids).toEqual(['agent1']);
+ expect(finalAgent.versions[1].agent_ids).toEqual(['agent1', 'agent2']);
+ expect(finalAgent.versions[2].agent_ids).toEqual(['agent3']);
+ expect(finalAgent.agent_ids).toEqual(['agent3']);
});
test('should handle empty agent_ids arrays', async () => {
@@ -3045,13 +3415,13 @@ describe('Agent Methods', () => {
const updated = await updateAgent({ id: agentId }, { agent_ids: [] });
- expect(updated?.versions).toHaveLength(2);
- expect(updated?.agent_ids).toEqual([]);
+ expect(updated.versions).toHaveLength(2);
+ expect(updated.agent_ids).toEqual([]);
// Update with same empty agent_ids should succeed but not create a new version
const duplicateUpdate = await updateAgent({ id: agentId }, { agent_ids: [] });
- expect(duplicateUpdate?.versions).toHaveLength(2); // No new version created
- expect(duplicateUpdate?.agent_ids).toEqual([]);
+ expect(duplicateUpdate.versions).toHaveLength(2); // No new version created
+ expect(duplicateUpdate.agent_ids).toEqual([]);
});
test('should handle agent without agent_ids field', async () => {
@@ -3070,13 +3440,27 @@ describe('Agent Methods', () => {
const updated = await updateAgent({ id: agentId }, { agent_ids: ['agent1'] });
- expect(updated?.versions).toHaveLength(2);
- expect(updated?.agent_ids).toEqual(['agent1']);
+ expect(updated.versions).toHaveLength(2);
+ expect(updated.agent_ids).toEqual(['agent1']);
});
});
});
describe('Support Contact Field', () => {
+ let mongoServer;
+
+ beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
+ await mongoose.connect(mongoUri);
+ }, 20000);
+
+ afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ });
+
beforeEach(async () => {
await Agent.deleteMany({});
});
@@ -3100,18 +3484,18 @@ describe('Support Contact Field', () => {
// Verify support_contact is stored correctly
expect(agent.support_contact).toBeDefined();
- expect(agent.support_contact?.name).toBe('Support Team');
- expect(agent.support_contact?.email).toBe('support@example.com');
+ expect(agent.support_contact.name).toBe('Support Team');
+ expect(agent.support_contact.email).toBe('support@example.com');
// Verify no _id field is created in support_contact
- expect((agent.support_contact as Record)?._id).toBeUndefined();
+ expect(agent.support_contact._id).toBeUndefined();
// Fetch from database to double-check
const dbAgent = await Agent.findOne({ id: agentData.id });
- expect(dbAgent?.support_contact).toBeDefined();
- expect(dbAgent?.support_contact?.name).toBe('Support Team');
- expect(dbAgent?.support_contact?.email).toBe('support@example.com');
- expect((dbAgent?.support_contact as Record)?._id).toBeUndefined();
+ expect(dbAgent.support_contact).toBeDefined();
+ expect(dbAgent.support_contact.name).toBe('Support Team');
+ expect(dbAgent.support_contact.email).toBe('support@example.com');
+ expect(dbAgent.support_contact._id).toBeUndefined();
});
it('should handle empty support_contact correctly', async () => {
@@ -3129,7 +3513,7 @@ describe('Support Contact Field', () => {
// Verify empty support_contact is stored as empty object
expect(agent.support_contact).toEqual({});
- expect((agent.support_contact as Record)?._id).toBeUndefined();
+ expect(agent.support_contact._id).toBeUndefined();
});
it('should handle missing support_contact correctly', async () => {
@@ -3149,12 +3533,11 @@ describe('Support Contact Field', () => {
});
describe('getListAgentsByAccess - Security Tests', () => {
- let userA: mongoose.Types.ObjectId, userB: mongoose.Types.ObjectId;
- let agentA1: Awaited>,
- agentA2: Awaited>,
- agentA3: Awaited>;
+ let userA, userB;
+ let agentA1, agentA2, agentA3;
beforeEach(async () => {
+ Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
await Agent.deleteMany({});
await AclEntry.deleteMany({});
@@ -3217,7 +3600,7 @@ describe('Support Contact Field', () => {
test('should only return agents in accessibleIds list', async () => {
// Give User B access to only one of User A's agents
- const accessibleIds = [agentA1._id] as mongoose.Types.ObjectId[];
+ const accessibleIds = [agentA1._id];
const result = await getListAgentsByAccess({
accessibleIds,
@@ -3231,7 +3614,7 @@ describe('Support Contact Field', () => {
test('should return multiple accessible agents when provided', async () => {
// Give User B access to two of User A's agents
- const accessibleIds = [agentA1._id, agentA3._id] as mongoose.Types.ObjectId[];
+ const accessibleIds = [agentA1._id, agentA3._id];
const result = await getListAgentsByAccess({
accessibleIds,
@@ -3247,7 +3630,7 @@ describe('Support Contact Field', () => {
test('should respect other query parameters while enforcing accessibleIds', async () => {
// Give access to all agents but filter by name
- const accessibleIds = [agentA1._id, agentA2._id, agentA3._id] as mongoose.Types.ObjectId[];
+ const accessibleIds = [agentA1._id, agentA2._id, agentA3._id];
const result = await getListAgentsByAccess({
accessibleIds,
@@ -3274,9 +3657,7 @@ describe('Support Contact Field', () => {
}
// Give access to all agents
- const allAgentIds = [agentA1, agentA2, agentA3, ...moreAgents].map(
- (a) => a._id,
- ) as mongoose.Types.ObjectId[];
+ const allAgentIds = [agentA1, agentA2, agentA3, ...moreAgents].map((a) => a._id);
// First page
const page1 = await getListAgentsByAccess({
@@ -3343,7 +3724,7 @@ describe('Support Contact Field', () => {
});
// Give User B access to one of User A's agents
- const accessibleIds = [agentA1._id, agentB1._id] as mongoose.Types.ObjectId[];
+ const accessibleIds = [agentA1._id, agentB1._id];
// Filter by author should further restrict the results
const result = await getListAgentsByAccess({
@@ -3373,21 +3754,18 @@ function createTestIds() {
return {
agentId: `agent_${uuidv4()}`,
authorId: new mongoose.Types.ObjectId(),
+ projectId: new mongoose.Types.ObjectId(),
fileId: uuidv4(),
};
}
-function createFileOperations(agentId: string, fileIds: string[], operation = 'add') {
+function createFileOperations(agentId, fileIds, operation = 'add') {
return fileIds.map((fileId) =>
operation === 'add'
- ? addAgentResourceFile({
- agent_id: agentId,
- tool_resource: EToolResources.execute_code,
- file_id: fileId,
- })
+ ? addAgentResourceFile({ agent_id: agentId, tool_resource: 'test_tool', file_id: fileId })
: removeAgentResourceFiles({
agent_id: agentId,
- files: [{ tool_resource: EToolResources.execute_code, file_id: fileId }],
+ files: [{ tool_resource: 'test_tool', file_id: fileId }],
}),
);
}
@@ -3401,14 +3779,7 @@ function mockFindOneAndUpdateError(errorOnCall = 1) {
if (callCount === errorOnCall) {
throw new Error('Database connection lost');
}
- return original.apply(
- Agent,
- args as [
- filter?: RootFilterQuery | undefined,
- update?: UpdateQuery | undefined,
- options?: QueryOptions | null | undefined,
- ],
- );
+ return original.apply(Agent, args);
});
return () => {
@@ -3417,6 +3788,9 @@ function mockFindOneAndUpdateError(errorOnCall = 1) {
}
function generateVersionTestCases() {
+ const projectId1 = new mongoose.Types.ObjectId();
+ const projectId2 = new mongoose.Types.ObjectId();
+
return [
{
name: 'simple field update',
@@ -3443,5 +3817,13 @@ function generateVersionTestCases() {
update: { tools: ['tool2', 'tool3'] },
duplicate: { tools: ['tool2', 'tool3'] },
},
+ {
+ name: 'projectIds update',
+ initial: {
+ projectIds: [projectId1],
+ },
+ update: { projectIds: [projectId1, projectId2] },
+ duplicate: { projectIds: [projectId2, projectId1] },
+ },
];
}
diff --git a/api/models/Assistant.js b/api/models/Assistant.js
new file mode 100644
index 0000000000..be94d35d7d
--- /dev/null
+++ b/api/models/Assistant.js
@@ -0,0 +1,62 @@
+const { Assistant } = require('~/db/models');
+
+/**
+ * Update an assistant with new data without overwriting existing properties,
+ * or create a new assistant if it doesn't exist.
+ *
+ * @param {Object} searchParams - The search parameters to find the assistant to update.
+ * @param {string} searchParams.assistant_id - The ID of the assistant to update.
+ * @param {string} searchParams.user - The user ID of the assistant's author.
+ * @param {Object} updateData - An object containing the properties to update.
+ * @returns {Promise<Object>} The updated or newly created assistant document as a plain object.
+ */
+const updateAssistantDoc = async (searchParams, updateData) => {
+ const options = { new: true, upsert: true };
+ return await Assistant.findOneAndUpdate(searchParams, updateData, options).lean();
+};
+
+/**
+ * Retrieves an assistant document based on the provided search parameters.
+ *
+ * @param {Object} searchParams - The search parameters to find the assistant to retrieve.
+ * @param {string} searchParams.assistant_id - The ID of the assistant to retrieve.
+ * @param {string} searchParams.user - The user ID of the assistant's author.
+ * @returns {Promise<Object|null>} The assistant document as a plain object, or null if not found.
+ */
+const getAssistant = async (searchParams) => await Assistant.findOne(searchParams).lean();
+
+/**
+ * Retrieves all assistants that match the given search parameters.
+ *
+ * @param {Object} searchParams - The search parameters to find matching assistants.
+ * @param {Object} [select] - Optional. Specifies which document fields to include or exclude.
+ * @returns {Promise<Array<Object>>} A promise that resolves to an array of assistant documents as plain objects.
+ */
+const getAssistants = async (searchParams, select = null) => {
+ let query = Assistant.find(searchParams);
+
+ if (select) {
+ query = query.select(select);
+ }
+
+ return await query.lean();
+};
+
+/**
+ * Deletes an assistant based on the provided ID.
+ *
+ * @param {Object} searchParams - The search parameters to find the assistant to delete.
+ * @param {string} searchParams.assistant_id - The ID of the assistant to delete.
+ * @param {string} searchParams.user - The user ID of the assistant's author.
+ * @returns {Promise<Object|null>} The deleted assistant document, or null if no match was found.
+ */
+const deleteAssistant = async (searchParams) => {
+ return await Assistant.findOneAndDelete(searchParams);
+};
+
+module.exports = {
+ updateAssistantDoc,
+ deleteAssistant,
+ getAssistants,
+ getAssistant,
+};
diff --git a/api/models/Banner.js b/api/models/Banner.js
new file mode 100644
index 0000000000..42ad1599ed
--- /dev/null
+++ b/api/models/Banner.js
@@ -0,0 +1,28 @@
+const { logger } = require('@librechat/data-schemas');
+const { Banner } = require('~/db/models');
+
+/**
+ * Retrieves the current active banner.
+ * @returns {Promise<Object|null>} The active banner object or null if no active banner is found.
+ */
+const getBanner = async (user) => {
+ try {
+ const now = new Date();
+ const banner = await Banner.findOne({
+ displayFrom: { $lte: now },
+ $or: [{ displayTo: { $gte: now } }, { displayTo: null }],
+ type: 'banner',
+ }).lean();
+
+ if (!banner || banner.isPublic || user) {
+ return banner;
+ }
+
+ return null;
+ } catch (error) {
+ logger.error('[getBanners] Error getting banners', error);
+ throw new Error('Error getting banners');
+ }
+};
+
+module.exports = { getBanner };
diff --git a/api/models/Categories.js b/api/models/Categories.js
new file mode 100644
index 0000000000..34bd2d8ed2
--- /dev/null
+++ b/api/models/Categories.js
@@ -0,0 +1,57 @@
+const { logger } = require('@librechat/data-schemas');
+
+const options = [
+ {
+ label: 'com_ui_idea',
+ value: 'idea',
+ },
+ {
+ label: 'com_ui_travel',
+ value: 'travel',
+ },
+ {
+ label: 'com_ui_teach_or_explain',
+ value: 'teach_or_explain',
+ },
+ {
+ label: 'com_ui_write',
+ value: 'write',
+ },
+ {
+ label: 'com_ui_shop',
+ value: 'shop',
+ },
+ {
+ label: 'com_ui_code',
+ value: 'code',
+ },
+ {
+ label: 'com_ui_misc',
+ value: 'misc',
+ },
+ {
+ label: 'com_ui_roleplay',
+ value: 'roleplay',
+ },
+ {
+ label: 'com_ui_finance',
+ value: 'finance',
+ },
+];
+
+module.exports = {
+ /**
+ * Retrieves the categories asynchronously.
+   * @returns {Promise<Array<Object>>} An array of category objects.
+ * @throws {Error} If there is an error retrieving the categories.
+ */
+ getCategories: async () => {
+ try {
+ // const categories = await Categories.find();
+ return options;
+ } catch (error) {
+ logger.error('Error getting categories', error);
+ return [];
+ }
+ },
+};
diff --git a/api/models/Conversation.js b/api/models/Conversation.js
new file mode 100644
index 0000000000..32eac1a764
--- /dev/null
+++ b/api/models/Conversation.js
@@ -0,0 +1,372 @@
+const { logger } = require('@librechat/data-schemas');
+const { createTempChatExpirationDate } = require('@librechat/api');
+const { getMessages, deleteMessages } = require('./Message');
+const { Conversation } = require('~/db/models');
+
+/**
+ * Searches for a conversation by conversationId and returns a lean document with only conversationId and user.
+ * @param {string} conversationId - The conversation's ID.
+ * @returns {Promise<{conversationId: string, user: string} | null>} The conversation object with selected fields or null if not found.
+ */
+const searchConversation = async (conversationId) => {
+ try {
+ return await Conversation.findOne({ conversationId }, 'conversationId user').lean();
+ } catch (error) {
+ logger.error('[searchConversation] Error searching conversation', error);
+ throw new Error('Error searching conversation');
+ }
+};
+
+/**
+ * Retrieves a single conversation for a given user and conversation ID.
+ * @param {string} user - The user's ID.
+ * @param {string} conversationId - The conversation's ID.
+ * @returns {Promise<Object|null>} The conversation object, or null if not found.
+ */
+const getConvo = async (user, conversationId) => {
+ try {
+ return await Conversation.findOne({ user, conversationId }).lean();
+ } catch (error) {
+ logger.error('[getConvo] Error getting single conversation', error);
+ throw new Error('Error getting single conversation');
+ }
+};
+
+const deleteNullOrEmptyConversations = async () => {
+ try {
+ const filter = {
+ $or: [
+ { conversationId: null },
+ { conversationId: '' },
+ { conversationId: { $exists: false } },
+ ],
+ };
+
+ const result = await Conversation.deleteMany(filter);
+
+ // Delete associated messages
+ const messageDeleteResult = await deleteMessages(filter);
+
+ logger.info(
+ `[deleteNullOrEmptyConversations] Deleted ${result.deletedCount} conversations and ${messageDeleteResult.deletedCount} messages`,
+ );
+
+ return {
+ conversations: result,
+ messages: messageDeleteResult,
+ };
+ } catch (error) {
+ logger.error('[deleteNullOrEmptyConversations] Error deleting conversations', error);
+ throw new Error('Error deleting conversations with null or empty conversationId');
+ }
+};
+
+/**
+ * Searches for a conversation by conversationId and returns associated file ids.
+ * @param {string} conversationId - The conversation's ID.
+ * @returns {Promise<Array<string>>} The conversation's file ids, or an empty array if none.
+ */
+const getConvoFiles = async (conversationId) => {
+ try {
+ return (await Conversation.findOne({ conversationId }, 'files').lean())?.files ?? [];
+ } catch (error) {
+ logger.error('[getConvoFiles] Error getting conversation files', error);
+ throw new Error('Error getting conversation files');
+ }
+};
+
+module.exports = {
+ getConvoFiles,
+ searchConversation,
+ deleteNullOrEmptyConversations,
+ /**
+ * Saves a conversation to the database.
+ * @param {Object} req - The request object.
+ * @param {string} conversationId - The conversation's ID.
+ * @param {Object} metadata - Additional metadata to log for operation.
+   * @returns {Promise<Object|null>} The conversation object, null when noUpsert skips a missing conversation, or an error object on failure.
+ */
+ saveConvo: async (req, { conversationId, newConversationId, ...convo }, metadata) => {
+ try {
+ if (metadata?.context) {
+ logger.debug(`[saveConvo] ${metadata.context}`);
+ }
+
+ const messages = await getMessages({ conversationId }, '_id');
+ const update = { ...convo, messages, user: req.user.id };
+
+ if (newConversationId) {
+ update.conversationId = newConversationId;
+ }
+
+ if (req?.body?.isTemporary) {
+ try {
+ const appConfig = req.config;
+ update.expiredAt = createTempChatExpirationDate(appConfig?.interfaceConfig);
+ } catch (err) {
+ logger.error('Error creating temporary chat expiration date:', err);
+ logger.info(`---\`saveConvo\` context: ${metadata?.context}`);
+ update.expiredAt = null;
+ }
+ } else {
+ update.expiredAt = null;
+ }
+
+      /** @type {{ $set: Partial<TConversation>; $unset?: Record<string, number> }} */
+ const updateOperation = { $set: update };
+ if (metadata && metadata.unsetFields && Object.keys(metadata.unsetFields).length > 0) {
+ updateOperation.$unset = metadata.unsetFields;
+ }
+
+ /** Note: the resulting Model object is necessary for Meilisearch operations */
+ const conversation = await Conversation.findOneAndUpdate(
+ { conversationId, user: req.user.id },
+ updateOperation,
+ {
+ new: true,
+ upsert: metadata?.noUpsert !== true,
+ },
+ );
+
+ if (!conversation) {
+ logger.debug('[saveConvo] Conversation not found, skipping update');
+ return null;
+ }
+
+ return conversation.toObject();
+ } catch (error) {
+ logger.error('[saveConvo] Error saving conversation', error);
+ if (metadata && metadata?.context) {
+ logger.info(`[saveConvo] ${metadata.context}`);
+ }
+ return { message: 'Error saving conversation' };
+ }
+ },
+ bulkSaveConvos: async (conversations) => {
+ try {
+ const bulkOps = conversations.map((convo) => ({
+ updateOne: {
+ filter: { conversationId: convo.conversationId, user: convo.user },
+ update: convo,
+ upsert: true,
+ timestamps: false,
+ },
+ }));
+
+ const result = await Conversation.bulkWrite(bulkOps);
+ return result;
+ } catch (error) {
+ logger.error('[bulkSaveConvos] Error saving conversations in bulk', error);
+ throw new Error('Failed to save conversations in bulk.');
+ }
+ },
+ getConvosByCursor: async (
+ user,
+ {
+ cursor,
+ limit = 25,
+ isArchived = false,
+ tags,
+ search,
+ sortBy = 'updatedAt',
+ sortDirection = 'desc',
+ } = {},
+ ) => {
+ const filters = [{ user }];
+ if (isArchived) {
+ filters.push({ isArchived: true });
+ } else {
+ filters.push({ $or: [{ isArchived: false }, { isArchived: { $exists: false } }] });
+ }
+
+ if (Array.isArray(tags) && tags.length > 0) {
+ filters.push({ tags: { $in: tags } });
+ }
+
+ filters.push({ $or: [{ expiredAt: null }, { expiredAt: { $exists: false } }] });
+
+ if (search) {
+ try {
+ const meiliResults = await Conversation.meiliSearch(search, { filter: `user = "${user}"` });
+ const matchingIds = Array.isArray(meiliResults.hits)
+ ? meiliResults.hits.map((result) => result.conversationId)
+ : [];
+ if (!matchingIds.length) {
+ return { conversations: [], nextCursor: null };
+ }
+ filters.push({ conversationId: { $in: matchingIds } });
+ } catch (error) {
+ logger.error('[getConvosByCursor] Error during meiliSearch', error);
+ throw new Error('Error during meiliSearch');
+ }
+ }
+
+ const validSortFields = ['title', 'createdAt', 'updatedAt'];
+ if (!validSortFields.includes(sortBy)) {
+ throw new Error(
+ `Invalid sortBy field: ${sortBy}. Must be one of ${validSortFields.join(', ')}`,
+ );
+ }
+ const finalSortBy = sortBy;
+ const finalSortDirection = sortDirection === 'asc' ? 'asc' : 'desc';
+
+ let cursorFilter = null;
+ if (cursor) {
+ try {
+ const decoded = JSON.parse(Buffer.from(cursor, 'base64').toString());
+ const { primary, secondary } = decoded;
+ const primaryValue = finalSortBy === 'title' ? primary : new Date(primary);
+ const secondaryValue = new Date(secondary);
+ const op = finalSortDirection === 'asc' ? '$gt' : '$lt';
+
+ cursorFilter = {
+ $or: [
+ { [finalSortBy]: { [op]: primaryValue } },
+ {
+ [finalSortBy]: primaryValue,
+ updatedAt: { [op]: secondaryValue },
+ },
+ ],
+ };
+ } catch (err) {
+ logger.warn('[getConvosByCursor] Invalid cursor format, starting from beginning');
+ }
+ if (cursorFilter) {
+ filters.push(cursorFilter);
+ }
+ }
+
+ const query = filters.length === 1 ? filters[0] : { $and: filters };
+
+ try {
+ const sortOrder = finalSortDirection === 'asc' ? 1 : -1;
+ const sortObj = { [finalSortBy]: sortOrder };
+
+ if (finalSortBy !== 'updatedAt') {
+ sortObj.updatedAt = sortOrder;
+ }
+
+ const convos = await Conversation.find(query)
+ .select(
+ 'conversationId endpoint title createdAt updatedAt user model agent_id assistant_id spec iconURL',
+ )
+ .sort(sortObj)
+ .limit(limit + 1)
+ .lean();
+
+ let nextCursor = null;
+ if (convos.length > limit) {
+ convos.pop(); // Remove extra item used to detect next page
+ // Create cursor from the last RETURNED item (not the popped one)
+ const lastReturned = convos[convos.length - 1];
+ const primaryValue = lastReturned[finalSortBy];
+ const primaryStr = finalSortBy === 'title' ? primaryValue : primaryValue.toISOString();
+ const secondaryStr = lastReturned.updatedAt.toISOString();
+ const composite = { primary: primaryStr, secondary: secondaryStr };
+ nextCursor = Buffer.from(JSON.stringify(composite)).toString('base64');
+ }
+
+ return { conversations: convos, nextCursor };
+ } catch (error) {
+ logger.error('[getConvosByCursor] Error getting conversations', error);
+ throw new Error('Error getting conversations');
+ }
+ },
+ getConvosQueried: async (user, convoIds, cursor = null, limit = 25) => {
+ try {
+ if (!convoIds?.length) {
+ return { conversations: [], nextCursor: null, convoMap: {} };
+ }
+
+ const conversationIds = convoIds.map((convo) => convo.conversationId);
+
+ const results = await Conversation.find({
+ user,
+ conversationId: { $in: conversationIds },
+ $or: [{ expiredAt: { $exists: false } }, { expiredAt: null }],
+ }).lean();
+
+ results.sort((a, b) => new Date(b.updatedAt) - new Date(a.updatedAt));
+
+ let filtered = results;
+ if (cursor && cursor !== 'start') {
+ const cursorDate = new Date(cursor);
+ filtered = results.filter((convo) => new Date(convo.updatedAt) < cursorDate);
+ }
+
+ const limited = filtered.slice(0, limit + 1);
+ let nextCursor = null;
+ if (limited.length > limit) {
+ limited.pop(); // Remove extra item used to detect next page
+ // Create cursor from the last RETURNED item (not the popped one)
+ nextCursor = limited[limited.length - 1].updatedAt.toISOString();
+ }
+
+ const convoMap = {};
+ limited.forEach((convo) => {
+ convoMap[convo.conversationId] = convo;
+ });
+
+ return { conversations: limited, nextCursor, convoMap };
+ } catch (error) {
+ logger.error('[getConvosQueried] Error getting conversations', error);
+ throw new Error('Error fetching conversations');
+ }
+ },
+ getConvo,
+ /* chore: this method is not properly error handled */
+ getConvoTitle: async (user, conversationId) => {
+ try {
+ const convo = await getConvo(user, conversationId);
+ /* ChatGPT Browser was triggering error here due to convo being saved later */
+ if (convo && !convo.title) {
+ return null;
+ } else {
+ // TypeError: Cannot read properties of null (reading 'title')
+ return convo?.title || 'New Chat';
+ }
+ } catch (error) {
+ logger.error('[getConvoTitle] Error getting conversation title', error);
+ throw new Error('Error getting conversation title');
+ }
+ },
+ /**
+ * Asynchronously deletes conversations and associated messages for a given user and filter.
+ *
+ * @async
+ * @function
+ * @param {string|ObjectId} user - The user's ID.
+ * @param {Object} filter - Additional filter criteria for the conversations to be deleted.
+ * @returns {Promise<{ n: number, ok: number, deletedCount: number, messages: { n: number, ok: number, deletedCount: number } }>}
+ * An object containing the count of deleted conversations and associated messages.
+ * @throws {Error} Throws an error if there's an issue with the database operations.
+ *
+ * @example
+ * const user = 'someUserId';
+ * const filter = { someField: 'someValue' };
+ * const result = await deleteConvos(user, filter);
+ * logger.error(result); // { n: 5, ok: 1, deletedCount: 5, messages: { n: 10, ok: 1, deletedCount: 10 } }
+ */
+ deleteConvos: async (user, filter) => {
+ try {
+ const userFilter = { ...filter, user };
+ const conversations = await Conversation.find(userFilter).select('conversationId');
+ const conversationIds = conversations.map((c) => c.conversationId);
+
+ if (!conversationIds.length) {
+ throw new Error('Conversation not found or already deleted.');
+ }
+
+ const deleteConvoResult = await Conversation.deleteMany(userFilter);
+
+ const deleteMessagesResult = await deleteMessages({
+ conversationId: { $in: conversationIds },
+ });
+
+ return { ...deleteConvoResult, messages: deleteMessagesResult };
+ } catch (error) {
+ logger.error('[deleteConvos] Error deleting conversations and messages', error);
+ throw error;
+ }
+ },
+};
diff --git a/packages/data-schemas/src/methods/conversation.spec.ts b/api/models/Conversation.spec.js
similarity index 64%
rename from packages/data-schemas/src/methods/conversation.spec.ts
rename to api/models/Conversation.spec.js
index 9e4c2d2f5d..bd415b4165 100644
--- a/packages/data-schemas/src/methods/conversation.spec.ts
+++ b/api/models/Conversation.spec.js
@@ -1,90 +1,39 @@
-import mongoose from 'mongoose';
-import { v4 as uuidv4 } from 'uuid';
-import { EModelEndpoint } from 'librechat-data-provider';
-import type { IConversation } from '../types';
-import { MongoMemoryServer } from 'mongodb-memory-server';
-import { ConversationMethods, createConversationMethods } from './conversation';
-import { tenantStorage, runAsSystem } from '~/config/tenantContext';
-import { createModels } from '../models';
+const mongoose = require('mongoose');
+const { v4: uuidv4 } = require('uuid');
+const { EModelEndpoint } = require('librechat-data-provider');
+const { MongoMemoryServer } = require('mongodb-memory-server');
+const {
+ deleteNullOrEmptyConversations,
+ searchConversation,
+ getConvosByCursor,
+ getConvosQueried,
+ getConvoFiles,
+ getConvoTitle,
+ deleteConvos,
+ saveConvo,
+ getConvo,
+} = require('./Conversation');
+jest.mock('~/server/services/Config/app');
+jest.mock('./Message');
+const { getMessages, deleteMessages } = require('./Message');
-jest.mock('~/config/winston', () => ({
- error: jest.fn(),
- warn: jest.fn(),
- info: jest.fn(),
- debug: jest.fn(),
-}));
-
-let mongoServer: InstanceType;
-let Conversation: mongoose.Model;
-let modelsToCleanup: string[] = [];
-
-// Mock message methods (same as original test mocking ./Message)
-const getMessages = jest.fn().mockResolvedValue([]);
-const deleteMessages = jest.fn().mockResolvedValue({ deletedCount: 0 });
-
-let methods: ConversationMethods;
-
-beforeAll(async () => {
- mongoServer = await MongoMemoryServer.create();
- const mongoUri = mongoServer.getUri();
-
- const models = createModels(mongoose);
- modelsToCleanup = Object.keys(models);
- Object.assign(mongoose.models, models);
- Conversation = mongoose.models.Conversation as mongoose.Model;
-
- methods = createConversationMethods(mongoose, { getMessages, deleteMessages });
-
- await mongoose.connect(mongoUri);
-});
-
-afterAll(async () => {
- const collections = mongoose.connection.collections;
- for (const key in collections) {
- await collections[key].deleteMany({});
- }
-
- for (const modelName of modelsToCleanup) {
- if (mongoose.models[modelName]) {
- delete mongoose.models[modelName];
- }
- }
-
- await mongoose.disconnect();
- await mongoServer.stop();
-});
-
-const saveConvo = (...args: Parameters) =>
- methods.saveConvo(...args) as Promise;
-const getConvo = (...args: Parameters) =>
- methods.getConvo(...args);
-const getConvoTitle = (...args: Parameters) =>
- methods.getConvoTitle(...args);
-const getConvoFiles = (...args: Parameters) =>
- methods.getConvoFiles(...args);
-const deleteConvos = (...args: Parameters) =>
- methods.deleteConvos(...args);
-const getConvosByCursor = (...args: Parameters) =>
- methods.getConvosByCursor(...args);
-const getConvosQueried = (...args: Parameters) =>
- methods.getConvosQueried(...args);
-const deleteNullOrEmptyConversations = (
- ...args: Parameters
-) => methods.deleteNullOrEmptyConversations(...args);
-const searchConversation = (...args: Parameters) =>
- methods.searchConversation(...args);
+const { Conversation } = require('~/db/models');
describe('Conversation Operations', () => {
- let mockCtx: {
- userId: string;
- isTemporary?: boolean;
- interfaceConfig?: { temporaryChatRetention?: number };
- };
- let mockConversationData: {
- conversationId: string;
- title: string;
- endpoint: string;
- };
+ let mongoServer;
+ let mockReq;
+ let mockConversationData;
+
+ beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ await mongoose.connect(mongoUri);
+ });
+
+ afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ });
beforeEach(async () => {
// Clear database
@@ -92,13 +41,18 @@ describe('Conversation Operations', () => {
// Reset mocks
jest.clearAllMocks();
+
+ // Default mock implementations
getMessages.mockResolvedValue([]);
deleteMessages.mockResolvedValue({ deletedCount: 0 });
- mockCtx = {
- userId: 'user123',
- interfaceConfig: {
- temporaryChatRetention: 24, // Default 24 hours
+ mockReq = {
+ user: { id: 'user123' },
+ body: {},
+ config: {
+ interfaceConfig: {
+ temporaryChatRetention: 24, // Default 24 hours
+ },
},
};
@@ -111,28 +65,29 @@ describe('Conversation Operations', () => {
describe('saveConvo', () => {
it('should save a conversation for an authenticated user', async () => {
- const result = await saveConvo(mockCtx, mockConversationData);
+ const result = await saveConvo(mockReq, mockConversationData);
- expect(result?.conversationId).toBe(mockConversationData.conversationId);
- expect(result?.user).toBe('user123');
- expect(result?.title).toBe('Test Conversation');
- expect(result?.endpoint).toBe(EModelEndpoint.openAI);
+ expect(result.conversationId).toBe(mockConversationData.conversationId);
+ expect(result.user).toBe('user123');
+ expect(result.title).toBe('Test Conversation');
+ expect(result.endpoint).toBe(EModelEndpoint.openAI);
// Verify the conversation was actually saved to the database
- const savedConvo = await Conversation.findOne({
+ const savedConvo = await Conversation.findOne({
conversationId: mockConversationData.conversationId,
user: 'user123',
});
expect(savedConvo).toBeTruthy();
- expect(savedConvo?.title).toBe('Test Conversation');
+ expect(savedConvo.title).toBe('Test Conversation');
});
it('should query messages when saving a conversation', async () => {
// Mock messages as ObjectIds
+ const mongoose = require('mongoose');
const mockMessages = [new mongoose.Types.ObjectId(), new mongoose.Types.ObjectId()];
getMessages.mockResolvedValue(mockMessages);
- await saveConvo(mockCtx, mockConversationData);
+ await saveConvo(mockReq, mockConversationData);
// Verify that getMessages was called with correct parameters
expect(getMessages).toHaveBeenCalledWith(
@@ -143,18 +98,18 @@ describe('Conversation Operations', () => {
it('should handle newConversationId when provided', async () => {
const newConversationId = uuidv4();
- const result = await saveConvo(mockCtx, {
+ const result = await saveConvo(mockReq, {
...mockConversationData,
newConversationId,
});
- expect(result?.conversationId).toBe(newConversationId);
+ expect(result.conversationId).toBe(newConversationId);
});
it('should not create a conversation when noUpsert is true and conversation does not exist', async () => {
const nonExistentId = uuidv4();
const result = await saveConvo(
- mockCtx,
+ mockReq,
{ conversationId: nonExistentId, title: 'Ghost Title' },
{ noUpsert: true },
);
@@ -166,30 +121,30 @@ describe('Conversation Operations', () => {
});
it('should update an existing conversation when noUpsert is true', async () => {
- await saveConvo(mockCtx, mockConversationData);
+ await saveConvo(mockReq, mockConversationData);
const result = await saveConvo(
- mockCtx,
+ mockReq,
{ conversationId: mockConversationData.conversationId, title: 'Updated Title' },
{ noUpsert: true },
);
expect(result).not.toBeNull();
- expect(result?.title).toBe('Updated Title');
- expect(result?.conversationId).toBe(mockConversationData.conversationId);
+ expect(result.title).toBe('Updated Title');
+ expect(result.conversationId).toBe(mockConversationData.conversationId);
});
it('should still upsert by default when noUpsert is not provided', async () => {
const newId = uuidv4();
- const result = await saveConvo(mockCtx, {
+ const result = await saveConvo(mockReq, {
conversationId: newId,
title: 'New Conversation',
endpoint: EModelEndpoint.openAI,
});
expect(result).not.toBeNull();
- expect(result?.conversationId).toBe(newId);
- expect(result?.title).toBe('New Conversation');
+ expect(result.conversationId).toBe(newId);
+ expect(result.title).toBe('New Conversation');
});
it('should handle unsetFields metadata', async () => {
@@ -197,30 +152,31 @@ describe('Conversation Operations', () => {
unsetFields: { someField: 1 },
};
- await saveConvo(mockCtx, mockConversationData, metadata);
+ await saveConvo(mockReq, mockConversationData, metadata);
- const savedConvo = await Conversation.findOne({
+ const savedConvo = await Conversation.findOne({
conversationId: mockConversationData.conversationId,
});
- expect(savedConvo?.someField).toBeUndefined();
+ expect(savedConvo.someField).toBeUndefined();
});
});
describe('isTemporary conversation handling', () => {
it('should save a conversation with expiredAt when isTemporary is true', async () => {
- mockCtx.interfaceConfig = { temporaryChatRetention: 24 };
- mockCtx.isTemporary = true;
+ mockReq.config.interfaceConfig.temporaryChatRetention = 24;
+
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveConvo(mockCtx, mockConversationData);
+ const result = await saveConvo(mockReq, mockConversationData);
const afterSave = new Date();
- expect(result?.conversationId).toBe(mockConversationData.conversationId);
- expect(result?.expiredAt).toBeDefined();
- expect(result?.expiredAt).toBeInstanceOf(Date);
+ expect(result.conversationId).toBe(mockConversationData.conversationId);
+ expect(result.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeInstanceOf(Date);
const expectedExpirationTime = new Date(beforeSave.getTime() + 24 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -231,35 +187,36 @@ describe('Conversation Operations', () => {
});
it('should save a conversation without expiredAt when isTemporary is false', async () => {
- mockCtx.isTemporary = false;
+ mockReq.body = { isTemporary: false };
- const result = await saveConvo(mockCtx, mockConversationData);
+ const result = await saveConvo(mockReq, mockConversationData);
- expect(result?.conversationId).toBe(mockConversationData.conversationId);
- expect(result?.expiredAt).toBeNull();
+ expect(result.conversationId).toBe(mockConversationData.conversationId);
+ expect(result.expiredAt).toBeNull();
});
it('should save a conversation without expiredAt when isTemporary is not provided', async () => {
- mockCtx.isTemporary = undefined;
+ mockReq.body = {};
- const result = await saveConvo(mockCtx, mockConversationData);
+ const result = await saveConvo(mockReq, mockConversationData);
- expect(result?.conversationId).toBe(mockConversationData.conversationId);
- expect(result?.expiredAt).toBeNull();
+ expect(result.conversationId).toBe(mockConversationData.conversationId);
+ expect(result.expiredAt).toBeNull();
});
it('should use custom retention period from config', async () => {
- mockCtx.interfaceConfig = { temporaryChatRetention: 48 };
- mockCtx.isTemporary = true;
+ mockReq.config.interfaceConfig.temporaryChatRetention = 48;
+
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveConvo(mockCtx, mockConversationData);
+ const result = await saveConvo(mockReq, mockConversationData);
- expect(result?.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeDefined();
// Verify expiredAt is approximately 48 hours in the future
const expectedExpirationTime = new Date(beforeSave.getTime() + 48 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -271,17 +228,18 @@ describe('Conversation Operations', () => {
it('should handle minimum retention period (1 hour)', async () => {
// Mock app config with less than minimum retention
- mockCtx.interfaceConfig = { temporaryChatRetention: 0.5 }; // Half hour - should be clamped to 1 hour
- mockCtx.isTemporary = true;
+ mockReq.config.interfaceConfig.temporaryChatRetention = 0.5; // Half hour - should be clamped to 1 hour
+
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveConvo(mockCtx, mockConversationData);
+ const result = await saveConvo(mockReq, mockConversationData);
- expect(result?.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeDefined();
// Verify expiredAt is approximately 1 hour in the future (minimum)
const expectedExpirationTime = new Date(beforeSave.getTime() + 1 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -293,17 +251,18 @@ describe('Conversation Operations', () => {
it('should handle maximum retention period (8760 hours)', async () => {
// Mock app config with more than maximum retention
- mockCtx.interfaceConfig = { temporaryChatRetention: 10000 }; // Should be clamped to 8760 hours
- mockCtx.isTemporary = true;
+ mockReq.config.interfaceConfig.temporaryChatRetention = 10000; // Should be clamped to 8760 hours
+
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveConvo(mockCtx, mockConversationData);
+ const result = await saveConvo(mockReq, mockConversationData);
- expect(result?.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeDefined();
// Verify expiredAt is approximately 8760 hours (1 year) in the future
const expectedExpirationTime = new Date(beforeSave.getTime() + 8760 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -315,21 +274,22 @@ describe('Conversation Operations', () => {
it('should handle missing config gracefully', async () => {
// Simulate missing config - should use default retention period
- mockCtx.interfaceConfig = undefined;
- mockCtx.isTemporary = true;
+ delete mockReq.config;
+
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveConvo(mockCtx, mockConversationData);
+ const result = await saveConvo(mockReq, mockConversationData);
const afterSave = new Date();
// Should still save the conversation with default retention period (30 days)
- expect(result?.conversationId).toBe(mockConversationData.conversationId);
- expect(result?.expiredAt).toBeDefined();
- expect(result?.expiredAt).toBeInstanceOf(Date);
+ expect(result.conversationId).toBe(mockConversationData.conversationId);
+ expect(result.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeInstanceOf(Date);
// Verify expiredAt is approximately 30 days in the future (720 hours)
const expectedExpirationTime = new Date(beforeSave.getTime() + 720 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -341,17 +301,18 @@ describe('Conversation Operations', () => {
it('should use default retention when config is not provided', async () => {
// Mock getAppConfig to return empty config
- mockCtx.interfaceConfig = undefined; // Empty config
- mockCtx.isTemporary = true;
+ mockReq.config = {}; // Empty config
+
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveConvo(mockCtx, mockConversationData);
+ const result = await saveConvo(mockReq, mockConversationData);
- expect(result?.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeDefined();
// Default retention is 30 days (720 hours)
const expectedExpirationTime = new Date(beforeSave.getTime() + 30 * 24 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -363,39 +324,40 @@ describe('Conversation Operations', () => {
it('should update expiredAt when saving existing temporary conversation', async () => {
// First save a temporary conversation
- mockCtx.interfaceConfig = { temporaryChatRetention: 24 };
- mockCtx.isTemporary = true;
- const firstSave = await saveConvo(mockCtx, mockConversationData);
- const originalExpiredAt = firstSave?.expiredAt ?? new Date(0);
+ mockReq.config.interfaceConfig.temporaryChatRetention = 24;
+
+ mockReq.body = { isTemporary: true };
+ const firstSave = await saveConvo(mockReq, mockConversationData);
+ const originalExpiredAt = firstSave.expiredAt;
// Wait a bit to ensure time difference
await new Promise((resolve) => setTimeout(resolve, 100));
// Save again with same conversationId but different title
const updatedData = { ...mockConversationData, title: 'Updated Title' };
- const secondSave = await saveConvo(mockCtx, updatedData);
+ const secondSave = await saveConvo(mockReq, updatedData);
// Should update title and create new expiredAt
- expect(secondSave?.title).toBe('Updated Title');
- expect(secondSave?.expiredAt).toBeDefined();
- expect(new Date(secondSave?.expiredAt ?? 0).getTime()).toBeGreaterThan(
+ expect(secondSave.title).toBe('Updated Title');
+ expect(secondSave.expiredAt).toBeDefined();
+ expect(new Date(secondSave.expiredAt).getTime()).toBeGreaterThan(
new Date(originalExpiredAt).getTime(),
);
});
it('should not set expiredAt when updating non-temporary conversation', async () => {
// First save a non-temporary conversation
- mockCtx.isTemporary = false;
- const firstSave = await saveConvo(mockCtx, mockConversationData);
- expect(firstSave?.expiredAt).toBeNull();
+ mockReq.body = { isTemporary: false };
+ const firstSave = await saveConvo(mockReq, mockConversationData);
+ expect(firstSave.expiredAt).toBeNull();
// Update without isTemporary flag
- mockCtx.isTemporary = undefined;
+ mockReq.body = {};
const updatedData = { ...mockConversationData, title: 'Updated Title' };
- const secondSave = await saveConvo(mockCtx, updatedData);
+ const secondSave = await saveConvo(mockReq, updatedData);
- expect(secondSave?.title).toBe('Updated Title');
- expect(secondSave?.expiredAt).toBeNull();
+ expect(secondSave.title).toBe('Updated Title');
+ expect(secondSave.expiredAt).toBeNull();
});
it('should filter out expired conversations in getConvosByCursor', async () => {
@@ -419,13 +381,13 @@ describe('Conversation Operations', () => {
});
// Mock Meili search
- Object.assign(Conversation, { meiliSearch: jest.fn().mockResolvedValue({ hits: [] }) });
+ Conversation.meiliSearch = jest.fn().mockResolvedValue({ hits: [] });
const result = await getConvosByCursor('user123');
// Should only return conversations with null or non-existent expiredAt
- expect(result?.conversations).toHaveLength(1);
- expect(result?.conversations[0]?.conversationId).toBe(nonExpiredConvo.conversationId);
+ expect(result.conversations).toHaveLength(1);
+ expect(result.conversations[0].conversationId).toBe(nonExpiredConvo.conversationId);
});
it('should filter out expired conversations in getConvosQueried', async () => {
@@ -454,10 +416,10 @@ describe('Conversation Operations', () => {
const result = await getConvosQueried('user123', convoIds);
// Should only return the non-expired conversation
- expect(result?.conversations).toHaveLength(1);
- expect(result?.conversations[0].conversationId).toBe(nonExpiredConvo.conversationId);
- expect(result?.convoMap[nonExpiredConvo.conversationId]).toBeDefined();
- expect(result?.convoMap[expiredConvo.conversationId]).toBeUndefined();
+ expect(result.conversations).toHaveLength(1);
+ expect(result.conversations[0].conversationId).toBe(nonExpiredConvo.conversationId);
+ expect(result.convoMap[nonExpiredConvo.conversationId]).toBeDefined();
+ expect(result.convoMap[expiredConvo.conversationId]).toBeUndefined();
});
});
@@ -473,9 +435,9 @@ describe('Conversation Operations', () => {
const result = await searchConversation(mockConversationData.conversationId);
expect(result).toBeTruthy();
- expect(result!.conversationId).toBe(mockConversationData.conversationId);
- expect(result!.user).toBe('user123');
- expect((result as unknown as { title?: string }).title).toBeUndefined(); // Only returns conversationId and user
+ expect(result.conversationId).toBe(mockConversationData.conversationId);
+ expect(result.user).toBe('user123');
+ expect(result.title).toBeUndefined(); // Only returns conversationId and user
});
it('should return null if conversation not found', async () => {
@@ -495,9 +457,9 @@ describe('Conversation Operations', () => {
const result = await getConvo('user123', mockConversationData.conversationId);
- expect(result!.conversationId).toBe(mockConversationData.conversationId);
- expect(result!.user).toBe('user123');
- expect(result!.title).toBe('Test Conversation');
+ expect(result.conversationId).toBe(mockConversationData.conversationId);
+ expect(result.user).toBe('user123');
+ expect(result.title).toBe('Test Conversation');
});
it('should return null if conversation not found', async () => {
@@ -583,11 +545,10 @@ describe('Conversation Operations', () => {
conversationId: mockConversationData.conversationId,
});
- expect(result?.deletedCount).toBe(1);
- expect(result?.messages.deletedCount).toBe(5);
+ expect(result.deletedCount).toBe(1);
+ expect(result.messages.deletedCount).toBe(5);
expect(deleteMessages).toHaveBeenCalledWith({
conversationId: { $in: [mockConversationData.conversationId] },
- user: 'user123',
});
// Verify conversation was deleted
@@ -620,8 +581,8 @@ describe('Conversation Operations', () => {
const result = await deleteNullOrEmptyConversations();
- expect(result?.conversations.deletedCount).toBe(0); // No invalid conversations to delete
- expect(result?.messages.deletedCount).toBe(0);
+ expect(result.conversations.deletedCount).toBe(0); // No invalid conversations to delete
+ expect(result.messages.deletedCount).toBe(0);
// Verify valid conversation remains
const remainingConvos = await Conversation.find({});
@@ -635,7 +596,7 @@ describe('Conversation Operations', () => {
// Force a database error by disconnecting
await mongoose.disconnect();
- const result = await saveConvo(mockCtx, mockConversationData);
+ const result = await saveConvo(mockReq, mockConversationData);
expect(result).toEqual({ message: 'Error saving conversation' });
@@ -649,7 +610,7 @@ describe('Conversation Operations', () => {
* Helper to create conversations with specific timestamps
* Uses collection.insertOne to bypass Mongoose timestamps entirely
*/
- const createConvoWithTimestamps = async (index: number, createdAt: Date, updatedAt: Date) => {
+ const createConvoWithTimestamps = async (index, createdAt, updatedAt) => {
const conversationId = uuidv4();
// Use collection-level insert to bypass Mongoose timestamps
await Conversation.collection.insertOne({
@@ -668,7 +629,7 @@ describe('Conversation Operations', () => {
it('should not skip conversations at page boundaries', async () => {
// Create 30 conversations to ensure pagination (limit is 25)
const baseTime = new Date('2026-01-01T00:00:00.000Z');
- const convos: unknown[] = [];
+ const convos = [];
for (let i = 0; i < 30; i++) {
const updatedAt = new Date(baseTime.getTime() - i * 60000); // Each 1 minute apart
@@ -694,8 +655,8 @@ describe('Conversation Operations', () => {
// Verify no duplicates and no gaps
const allIds = [
- ...page1.conversations.map((c: IConversation) => c.conversationId),
- ...page2.conversations.map((c: IConversation) => c.conversationId),
+ ...page1.conversations.map((c) => c.conversationId),
+ ...page2.conversations.map((c) => c.conversationId),
];
const uniqueIds = new Set(allIds);
@@ -710,7 +671,7 @@ describe('Conversation Operations', () => {
const baseTime = new Date('2026-01-01T12:00:00.000Z');
// Create exactly 26 conversations
- const convos: (IConversation | null)[] = [];
+ const convos = [];
for (let i = 0; i < 26; i++) {
const updatedAt = new Date(baseTime.getTime() - i * 60000);
const convo = await createConvoWithTimestamps(i, updatedAt, updatedAt);
@@ -727,8 +688,8 @@ describe('Conversation Operations', () => {
expect(page1.nextCursor).toBeTruthy();
// Item 26 should NOT be in page 1
- const page1Ids = page1.conversations.map((c: IConversation) => c.conversationId);
- expect(page1Ids).not.toContain(item26!.conversationId);
+ const page1Ids = page1.conversations.map((c) => c.conversationId);
+ expect(page1Ids).not.toContain(item26.conversationId);
// Fetch second page
const page2 = await getConvosByCursor('user123', {
@@ -738,7 +699,7 @@ describe('Conversation Operations', () => {
// Item 26 MUST be in page 2 (this was the bug - it was being skipped)
expect(page2.conversations).toHaveLength(1);
- expect(page2.conversations[0].conversationId).toBe(item26!.conversationId);
+ expect(page2.conversations[0].conversationId).toBe(item26.conversationId);
});
it('should sort by updatedAt DESC by default', async () => {
@@ -765,10 +726,10 @@ describe('Conversation Operations', () => {
const result = await getConvosByCursor('user123');
// Should be sorted by updatedAt DESC (most recent first)
- expect(result?.conversations).toHaveLength(3);
- expect(result?.conversations[0].conversationId).toBe(convo1!.conversationId); // Jan 3 updatedAt
- expect(result?.conversations[1].conversationId).toBe(convo2!.conversationId); // Jan 2 updatedAt
- expect(result?.conversations[2].conversationId).toBe(convo3!.conversationId); // Jan 1 updatedAt
+ expect(result.conversations).toHaveLength(3);
+ expect(result.conversations[0].conversationId).toBe(convo1.conversationId); // Jan 3 updatedAt
+ expect(result.conversations[1].conversationId).toBe(convo2.conversationId); // Jan 2 updatedAt
+ expect(result.conversations[2].conversationId).toBe(convo3.conversationId); // Jan 1 updatedAt
});
it('should handle conversations with same updatedAt (tie-breaker)', async () => {
@@ -782,12 +743,12 @@ describe('Conversation Operations', () => {
const result = await getConvosByCursor('user123');
// All 3 should be returned (no skipping due to same timestamps)
- expect(result?.conversations).toHaveLength(3);
+ expect(result.conversations).toHaveLength(3);
- const returnedIds = result?.conversations.map((c: IConversation) => c.conversationId);
- expect(returnedIds).toContain(convo1!.conversationId);
- expect(returnedIds).toContain(convo2!.conversationId);
- expect(returnedIds).toContain(convo3!.conversationId);
+ const returnedIds = result.conversations.map((c) => c.conversationId);
+ expect(returnedIds).toContain(convo1.conversationId);
+ expect(returnedIds).toContain(convo2.conversationId);
+ expect(returnedIds).toContain(convo3.conversationId);
});
it('should handle cursor pagination with conversations updated during pagination', async () => {
@@ -844,15 +805,13 @@ describe('Conversation Operations', () => {
const page1 = await getConvosByCursor('user123', { limit: 25 });
// Decode the cursor to verify it's based on the last RETURNED item
- const decodedCursor = JSON.parse(
- Buffer.from(page1.nextCursor as string, 'base64').toString(),
- );
+ const decodedCursor = JSON.parse(Buffer.from(page1.nextCursor, 'base64').toString());
// The cursor should match the last item in page1 (item at index 24)
- const lastReturnedItem = page1.conversations[24] as IConversation;
+ const lastReturnedItem = page1.conversations[24];
expect(new Date(decodedCursor.primary).getTime()).toBe(
- new Date(lastReturnedItem.updatedAt ?? 0).getTime(),
+ new Date(lastReturnedItem.updatedAt).getTime(),
);
});
@@ -871,26 +830,26 @@ describe('Conversation Operations', () => {
);
// Verify timestamps were set correctly
- expect(new Date(convo1!.createdAt ?? 0).getTime()).toBe(
+ expect(new Date(convo1.createdAt).getTime()).toBe(
new Date('2026-01-03T00:00:00.000Z').getTime(),
);
- expect(new Date(convo2!.createdAt ?? 0).getTime()).toBe(
+ expect(new Date(convo2.createdAt).getTime()).toBe(
new Date('2026-01-01T00:00:00.000Z').getTime(),
);
const result = await getConvosByCursor('user123', { sortBy: 'createdAt' });
// Should be sorted by createdAt DESC
- expect(result?.conversations).toHaveLength(2);
- expect(result?.conversations[0].conversationId).toBe(convo1!.conversationId); // Jan 3 createdAt
- expect(result?.conversations[1].conversationId).toBe(convo2!.conversationId); // Jan 1 createdAt
+ expect(result.conversations).toHaveLength(2);
+ expect(result.conversations[0].conversationId).toBe(convo1.conversationId); // Jan 3 createdAt
+ expect(result.conversations[1].conversationId).toBe(convo2.conversationId); // Jan 1 createdAt
});
it('should handle empty result set gracefully', async () => {
const result = await getConvosByCursor('user123');
- expect(result?.conversations).toHaveLength(0);
- expect(result?.nextCursor).toBeNull();
+ expect(result.conversations).toHaveLength(0);
+ expect(result.nextCursor).toBeNull();
});
it('should handle exactly limit number of conversations (no next page)', async () => {
@@ -904,54 +863,8 @@ describe('Conversation Operations', () => {
const result = await getConvosByCursor('user123', { limit: 25 });
- expect(result?.conversations).toHaveLength(25);
- expect(result?.nextCursor).toBeNull(); // No next page
- });
- });
-
- describe('tenantId stripping', () => {
- it('saveConvo should not write caller-supplied tenantId to the document', async () => {
- const conversationId = uuidv4();
- const result = await saveConvo(
- { userId: 'user123' },
- { conversationId, tenantId: 'malicious-tenant', title: 'Tenant Test' },
- );
-
- expect(result).not.toBeNull();
- const doc = await Conversation.findOne({ conversationId }).lean();
- expect(doc).not.toBeNull();
- expect(doc?.title).toBe('Tenant Test');
- expect(doc?.tenantId).toBeUndefined();
- });
-
- it('bulkSaveConvos should not overwrite tenantId via update payload', async () => {
- const conversationId = uuidv4();
-
- await tenantStorage.run({ tenantId: 'real-tenant' }, async () => {
- await Conversation.create({
- conversationId,
- user: 'user123',
- title: 'Original',
- endpoint: EModelEndpoint.openAI,
- });
- });
-
- await tenantStorage.run({ tenantId: 'real-tenant' }, async () => {
- await methods.bulkSaveConvos([
- {
- conversationId,
- user: 'user123',
- title: 'Updated',
- tenantId: 'malicious-tenant',
- endpoint: EModelEndpoint.openAI,
- },
- ]);
- });
-
- const doc = await runAsSystem(async () => Conversation.findOne({ conversationId }).lean());
- expect(doc).not.toBeNull();
- expect(doc?.title).toBe('Updated');
- expect(doc?.tenantId).toBe('real-tenant');
+ expect(result.conversations).toHaveLength(25);
+ expect(result.nextCursor).toBeNull(); // No next page
});
});
});
diff --git a/api/models/ConversationTag.js b/api/models/ConversationTag.js
new file mode 100644
index 0000000000..47a6c2bbf5
--- /dev/null
+++ b/api/models/ConversationTag.js
@@ -0,0 +1,284 @@
+const { logger } = require('@librechat/data-schemas');
+const { ConversationTag, Conversation } = require('~/db/models');
+
+/**
+ * Retrieves all conversation tags for a user.
+ * @param {string} user - The user ID.
+ * @returns {Promise} An array of conversation tags.
+ */
+const getConversationTags = async (user) => {
+ try {
+ return await ConversationTag.find({ user }).sort({ position: 1 }).lean();
+ } catch (error) {
+ logger.error('[getConversationTags] Error getting conversation tags', error);
+ throw new Error('Error getting conversation tags');
+ }
+};
+
+/**
+ * Creates a new conversation tag.
+ * @param {string} user - The user ID.
+ * @param {Object} data - The tag data.
+ * @param {string} data.tag - The tag name.
+ * @param {string} [data.description] - The tag description.
+ * @param {boolean} [data.addToConversation] - Whether to add the tag to a conversation.
+ * @param {string} [data.conversationId] - The conversation ID to add the tag to.
+ * @returns {Promise} The created tag.
+ */
+const createConversationTag = async (user, data) => {
+ try {
+ const { tag, description, addToConversation, conversationId } = data;
+
+ const existingTag = await ConversationTag.findOne({ user, tag }).lean();
+ if (existingTag) {
+ return existingTag;
+ }
+
+ const maxPosition = await ConversationTag.findOne({ user }).sort('-position').lean();
+ const position = (maxPosition?.position || 0) + 1;
+
+ const newTag = await ConversationTag.findOneAndUpdate(
+ { tag, user },
+ {
+ tag,
+ user,
+ count: addToConversation ? 1 : 0,
+ position,
+ description,
+ $setOnInsert: { createdAt: new Date() },
+ },
+ {
+ new: true,
+ upsert: true,
+ lean: true,
+ },
+ );
+
+ if (addToConversation && conversationId) {
+ await Conversation.findOneAndUpdate(
+ { user, conversationId },
+ { $addToSet: { tags: tag } },
+ { new: true },
+ );
+ }
+
+ return newTag;
+ } catch (error) {
+ logger.error('[createConversationTag] Error creating conversation tag', error);
+ throw new Error('Error creating conversation tag');
+ }
+};
+
+/**
+ * Updates an existing conversation tag.
+ * @param {string} user - The user ID.
+ * @param {string} oldTag - The current tag name.
+ * @param {Object} data - The updated tag data.
+ * @param {string} [data.tag] - The new tag name.
+ * @param {string} [data.description] - The updated description.
+ * @param {number} [data.position] - The new position.
+ * @returns {Promise} The updated tag.
+ */
+const updateConversationTag = async (user, oldTag, data) => {
+ try {
+ const { tag: newTag, description, position } = data;
+
+ const existingTag = await ConversationTag.findOne({ user, tag: oldTag }).lean();
+ if (!existingTag) {
+ return null;
+ }
+
+ if (newTag && newTag !== oldTag) {
+ const tagAlreadyExists = await ConversationTag.findOne({ user, tag: newTag }).lean();
+ if (tagAlreadyExists) {
+ throw new Error('Tag already exists');
+ }
+
+ await Conversation.updateMany({ user, tags: oldTag }, { $set: { 'tags.$': newTag } });
+ }
+
+ const updateData = {};
+ if (newTag) {
+ updateData.tag = newTag;
+ }
+ if (description !== undefined) {
+ updateData.description = description;
+ }
+ if (position !== undefined) {
+ await adjustPositions(user, existingTag.position, position);
+ updateData.position = position;
+ }
+
+ return await ConversationTag.findOneAndUpdate({ user, tag: oldTag }, updateData, {
+ new: true,
+ lean: true,
+ });
+ } catch (error) {
+ logger.error('[updateConversationTag] Error updating conversation tag', error);
+ throw new Error('Error updating conversation tag');
+ }
+};
+
+/**
+ * Adjusts positions of tags when a tag's position is changed.
+ * @param {string} user - The user ID.
+ * @param {number} oldPosition - The old position of the tag.
+ * @param {number} newPosition - The new position of the tag.
+ * @returns {Promise}
+ */
+const adjustPositions = async (user, oldPosition, newPosition) => {
+ if (oldPosition === newPosition) {
+ return;
+ }
+
+ const update = oldPosition < newPosition ? { $inc: { position: -1 } } : { $inc: { position: 1 } };
+ const position =
+ oldPosition < newPosition
+ ? {
+ $gt: Math.min(oldPosition, newPosition),
+ $lte: Math.max(oldPosition, newPosition),
+ }
+ : {
+ $gte: Math.min(oldPosition, newPosition),
+ $lt: Math.max(oldPosition, newPosition),
+ };
+
+ await ConversationTag.updateMany(
+ {
+ user,
+ position,
+ },
+ update,
+ );
+};
+
+/**
+ * Deletes a conversation tag.
+ * @param {string} user - The user ID.
+ * @param {string} tag - The tag to delete.
+ * @returns {Promise} The deleted tag.
+ */
+const deleteConversationTag = async (user, tag) => {
+ try {
+ const deletedTag = await ConversationTag.findOneAndDelete({ user, tag }).lean();
+ if (!deletedTag) {
+ return null;
+ }
+
+ await Conversation.updateMany({ user, tags: tag }, { $pull: { tags: tag } });
+
+ await ConversationTag.updateMany(
+ { user, position: { $gt: deletedTag.position } },
+ { $inc: { position: -1 } },
+ );
+
+ return deletedTag;
+ } catch (error) {
+ logger.error('[deleteConversationTag] Error deleting conversation tag', error);
+ throw new Error('Error deleting conversation tag');
+ }
+};
+
+/**
+ * Updates tags for a specific conversation.
+ * @param {string} user - The user ID.
+ * @param {string} conversationId - The conversation ID.
+ * @param {string[]} tags - The new set of tags for the conversation.
+ * @returns {Promise} The updated list of tags for the conversation.
+ */
+const updateTagsForConversation = async (user, conversationId, tags) => {
+ try {
+ const conversation = await Conversation.findOne({ user, conversationId }).lean();
+ if (!conversation) {
+ throw new Error('Conversation not found');
+ }
+
+ const oldTags = new Set(conversation.tags);
+ const newTags = new Set(tags);
+
+ const addedTags = [...newTags].filter((tag) => !oldTags.has(tag));
+ const removedTags = [...oldTags].filter((tag) => !newTags.has(tag));
+
+ const bulkOps = [];
+
+ for (const tag of addedTags) {
+ bulkOps.push({
+ updateOne: {
+ filter: { user, tag },
+ update: { $inc: { count: 1 } },
+ upsert: true,
+ },
+ });
+ }
+
+ for (const tag of removedTags) {
+ bulkOps.push({
+ updateOne: {
+ filter: { user, tag },
+ update: { $inc: { count: -1 } },
+ },
+ });
+ }
+
+ if (bulkOps.length > 0) {
+ await ConversationTag.bulkWrite(bulkOps);
+ }
+
+ const updatedConversation = (
+ await Conversation.findOneAndUpdate(
+ { user, conversationId },
+ { $set: { tags: [...newTags] } },
+ { new: true },
+ )
+ ).toObject();
+
+ return updatedConversation.tags;
+ } catch (error) {
+ logger.error('[updateTagsForConversation] Error updating tags', error);
+ throw new Error('Error updating tags for conversation');
+ }
+};
+
+/**
+ * Increments tag counts for existing tags only.
+ * @param {string} user - The user ID.
+ * @param {string[]} tags - Array of tag names to increment
+ * @returns {Promise}
+ */
+const bulkIncrementTagCounts = async (user, tags) => {
+ if (!tags || tags.length === 0) {
+ return;
+ }
+
+ try {
+ const uniqueTags = [...new Set(tags.filter(Boolean))];
+ if (uniqueTags.length === 0) {
+ return;
+ }
+
+ const bulkOps = uniqueTags.map((tag) => ({
+ updateOne: {
+ filter: { user, tag },
+ update: { $inc: { count: 1 } },
+ },
+ }));
+
+ const result = await ConversationTag.bulkWrite(bulkOps);
+ if (result && result.modifiedCount > 0) {
+ logger.debug(
+ `user: ${user} | Incremented tag counts - modified ${result.modifiedCount} tags`,
+ );
+ }
+ } catch (error) {
+ logger.error('[bulkIncrementTagCounts] Error incrementing tag counts', error);
+ }
+};
+
+module.exports = {
+ getConversationTags,
+ createConversationTag,
+ updateConversationTag,
+ deleteConversationTag,
+ bulkIncrementTagCounts,
+ updateTagsForConversation,
+};
diff --git a/api/models/File.js b/api/models/File.js
new file mode 100644
index 0000000000..1a01ef12f9
--- /dev/null
+++ b/api/models/File.js
@@ -0,0 +1,250 @@
+const { logger } = require('@librechat/data-schemas');
+const { EToolResources, FileContext } = require('librechat-data-provider');
+const { File } = require('~/db/models');
+
+/**
+ * Finds a file by its file_id with additional query options.
+ * @param {string} file_id - The unique identifier of the file.
+ * @param {object} options - Query options for filtering, projection, etc.
+ * @returns {Promise} A promise that resolves to the file document or null.
+ */
+const findFileById = async (file_id, options = {}) => {
+ return await File.findOne({ file_id, ...options }).lean();
+};
+
+/**
+ * Retrieves files matching a given filter, sorted by the most recently updated.
+ * @param {Object} filter - The filter criteria to apply.
+ * @param {Object} [_sortOptions] - Optional sort parameters.
+ * @param {Object|String} [selectFields={ text: 0 }] - Fields to include/exclude in the query results.
+ * Default excludes the 'text' field.
+ * @returns {Promise<Array<MongoFile>>} A promise that resolves to an array of file documents.
+ */
+const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
+ const sortOptions = { updatedAt: -1, ..._sortOptions };
+ return await File.find(filter).select(selectFields).sort(sortOptions).lean();
+};
+
+/**
+ * Retrieves tool files (files that are embedded or have a fileIdentifier) from an array of file IDs.
+ * Note: execute_code files are handled separately by getCodeGeneratedFiles.
+ * @param {string[]} fileIds - Array of file_id strings to search for
+ * @param {Set<EToolResources>} toolResourceSet - Optional filter for tool resources
+ * @returns {Promise<Array<MongoFile>>} Files that match the criteria
+ */
+const getToolFilesByIds = async (fileIds, toolResourceSet) => {
+ if (!fileIds || !fileIds.length || !toolResourceSet?.size) {
+ return [];
+ }
+
+ try {
+ const orConditions = [];
+
+ if (toolResourceSet.has(EToolResources.context)) {
+ orConditions.push({ text: { $exists: true, $ne: null }, context: FileContext.agents });
+ }
+ if (toolResourceSet.has(EToolResources.file_search)) {
+ orConditions.push({ embedded: true });
+ }
+
+ if (orConditions.length === 0) {
+ return [];
+ }
+
+ const filter = {
+ file_id: { $in: fileIds },
+ context: { $ne: FileContext.execute_code }, // Exclude code-generated files
+ $or: orConditions,
+ };
+
+ const selectFields = { text: 0 };
+ const sortOptions = { updatedAt: -1 };
+
+ return await getFiles(filter, sortOptions, selectFields);
+ } catch (error) {
+ logger.error('[getToolFilesByIds] Error retrieving tool files:', error);
+ throw new Error('Error retrieving tool files');
+ }
+};
+
+/**
+ * Retrieves files generated by code execution for a given conversation.
+ * These files are stored locally with fileIdentifier metadata for code env re-upload.
+ * @param {string} conversationId - The conversation ID to search for
+ * @param {string[]} [messageIds] - Optional array of messageIds to filter by (for linear thread filtering)
+ * @returns {Promise<Array<MongoFile>>} Files generated by code execution in the conversation
+ */
+const getCodeGeneratedFiles = async (conversationId, messageIds) => {
+ if (!conversationId) {
+ return [];
+ }
+
+ /** messageIds are required for proper thread filtering of code-generated files */
+ if (!messageIds || messageIds.length === 0) {
+ return [];
+ }
+
+ try {
+ const filter = {
+ conversationId,
+ context: FileContext.execute_code,
+ messageId: { $exists: true, $in: messageIds },
+ 'metadata.fileIdentifier': { $exists: true },
+ };
+
+ const selectFields = { text: 0 };
+ const sortOptions = { createdAt: 1 };
+
+ return await getFiles(filter, sortOptions, selectFields);
+ } catch (error) {
+ logger.error('[getCodeGeneratedFiles] Error retrieving code generated files:', error);
+ return [];
+ }
+};
+
+/**
+ * Retrieves user-uploaded execute_code files (not code-generated) by their file IDs.
+ * These are files with fileIdentifier metadata but context is NOT execute_code (e.g., agents or message_attachment).
+ * File IDs should be collected from message.files arrays in the current thread.
+ * @param {string[]} fileIds - Array of file IDs to fetch (from message.files in the thread)
+ * @returns {Promise<Array<MongoFile>>} User-uploaded execute_code files
+ */
+const getUserCodeFiles = async (fileIds) => {
+ if (!fileIds || fileIds.length === 0) {
+ return [];
+ }
+
+ try {
+ const filter = {
+ file_id: { $in: fileIds },
+ context: { $ne: FileContext.execute_code },
+ 'metadata.fileIdentifier': { $exists: true },
+ };
+
+ const selectFields = { text: 0 };
+ const sortOptions = { createdAt: 1 };
+
+ return await getFiles(filter, sortOptions, selectFields);
+ } catch (error) {
+ logger.error('[getUserCodeFiles] Error retrieving user code files:', error);
+ return [];
+ }
+};
+
+/**
+ * Creates a new file with a TTL of 1 hour.
+ * @param {MongoFile} data - The file data to be created, must contain file_id.
+ * @param {boolean} disableTTL - Whether to disable the TTL.
+ * @returns {Promise} A promise that resolves to the created file document.
+ */
+const createFile = async (data, disableTTL) => {
+ const fileData = {
+ ...data,
+ expiresAt: new Date(Date.now() + 3600 * 1000),
+ };
+
+ if (disableTTL) {
+ delete fileData.expiresAt;
+ }
+
+ return await File.findOneAndUpdate({ file_id: data.file_id }, fileData, {
+ new: true,
+ upsert: true,
+ }).lean();
+};
+
+/**
+ * Updates a file identified by file_id with new data and removes the TTL.
+ * @param {MongoFile} data - The data to update, must contain file_id.
+ * @returns {Promise} A promise that resolves to the updated file document.
+ */
+const updateFile = async (data) => {
+ const { file_id, ...update } = data;
+ const updateOperation = {
+ $set: update,
+ $unset: { expiresAt: '' }, // Remove the expiresAt field to prevent TTL
+ };
+ return await File.findOneAndUpdate({ file_id }, updateOperation, { new: true }).lean();
+};
+
+/**
+ * Increments the usage of a file identified by file_id.
+ * @param {MongoFile} data - The data to update, must contain file_id and the increment value for usage.
+ * @returns {Promise} A promise that resolves to the updated file document.
+ */
+const updateFileUsage = async (data) => {
+ const { file_id, inc = 1 } = data;
+ const updateOperation = {
+ $inc: { usage: inc },
+ $unset: { expiresAt: '', temp_file_id: '' },
+ };
+ return await File.findOneAndUpdate({ file_id }, updateOperation, { new: true }).lean();
+};
+
+/**
+ * Deletes a file identified by file_id.
+ * @param {string} file_id - The unique identifier of the file to delete.
+ * @returns {Promise} A promise that resolves to the deleted file document or null.
+ */
+const deleteFile = async (file_id) => {
+ return await File.findOneAndDelete({ file_id }).lean();
+};
+
+/**
+ * Deletes a file identified by a filter.
+ * @param {object} filter - The filter criteria to apply.
+ * @returns {Promise} A promise that resolves to the deleted file document or null.
+ */
+const deleteFileByFilter = async (filter) => {
+ return await File.findOneAndDelete(filter).lean();
+};
+
+/**
+ * Deletes multiple files identified by an array of file_ids.
+ * @param {Array} file_ids - The unique identifiers of the files to delete.
+ * @returns {Promise} A promise that resolves to the result of the deletion operation.
+ */
+const deleteFiles = async (file_ids, user) => {
+ let deleteQuery = { file_id: { $in: file_ids } };
+ if (user) {
+ deleteQuery = { user: user };
+ }
+ return await File.deleteMany(deleteQuery);
+};
+
+/**
+ * Batch updates files with new signed URLs in MongoDB
+ *
+ * @param {MongoFile[]} updates - Array of updates in the format { file_id, filepath }
+ * @returns {Promise}
+ */
+async function batchUpdateFiles(updates) {
+ if (!updates || updates.length === 0) {
+ return;
+ }
+
+ const bulkOperations = updates.map((update) => ({
+ updateOne: {
+ filter: { file_id: update.file_id },
+ update: { $set: { filepath: update.filepath } },
+ },
+ }));
+
+ const result = await File.bulkWrite(bulkOperations);
+ logger.info(`Updated ${result.modifiedCount} files with new S3 URLs`);
+}
+
+module.exports = {
+ findFileById,
+ getFiles,
+ getToolFilesByIds,
+ getCodeGeneratedFiles,
+ getUserCodeFiles,
+ createFile,
+ updateFile,
+ updateFileUsage,
+ deleteFile,
+ deleteFiles,
+ deleteFileByFilter,
+ batchUpdateFiles,
+};
diff --git a/api/models/File.spec.js b/api/models/File.spec.js
new file mode 100644
index 0000000000..2d4282cff7
--- /dev/null
+++ b/api/models/File.spec.js
@@ -0,0 +1,629 @@
+const mongoose = require('mongoose');
+const { v4: uuidv4 } = require('uuid');
+const { MongoMemoryServer } = require('mongodb-memory-server');
+const { createModels, createMethods } = require('@librechat/data-schemas');
+const {
+ SystemRoles,
+ ResourceType,
+ AccessRoleIds,
+ PrincipalType,
+} = require('librechat-data-provider');
+const { grantPermission } = require('~/server/services/PermissionService');
+const { createAgent } = require('./Agent');
+
+let File;
+let Agent;
+let AclEntry;
+let User;
+let modelsToCleanup = [];
+let methods;
+let getFiles;
+let createFile;
+let seedDefaultRoles;
+
+describe('File Access Control', () => {
+ let mongoServer;
+
+ beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ await mongoose.connect(mongoUri);
+
+ // Initialize all models
+ const models = createModels(mongoose);
+
+ // Track which models we're adding
+ modelsToCleanup = Object.keys(models);
+
+ // Register models on mongoose.models so methods can access them
+ const dbModels = require('~/db/models');
+ Object.assign(mongoose.models, dbModels);
+
+ File = dbModels.File;
+ Agent = dbModels.Agent;
+ AclEntry = dbModels.AclEntry;
+ User = dbModels.User;
+
+ // Create methods from data-schemas (includes file methods)
+ methods = createMethods(mongoose);
+ getFiles = methods.getFiles;
+ createFile = methods.createFile;
+ seedDefaultRoles = methods.seedDefaultRoles;
+
+ // Seed default roles
+ await seedDefaultRoles();
+ });
+
+ afterAll(async () => {
+ // Clean up all collections before disconnecting
+ const collections = mongoose.connection.collections;
+ for (const key in collections) {
+ await collections[key].deleteMany({});
+ }
+
+ // Clear only the models we added
+ for (const modelName of modelsToCleanup) {
+ if (mongoose.models[modelName]) {
+ delete mongoose.models[modelName];
+ }
+ }
+
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ });
+
+ beforeEach(async () => {
+ await File.deleteMany({});
+ await Agent.deleteMany({});
+ await AclEntry.deleteMany({});
+ await User.deleteMany({});
+ // Don't delete AccessRole as they are seeded defaults needed for tests
+ });
+
+ describe('hasAccessToFilesViaAgent', () => {
+ it('should efficiently check access for multiple files at once', async () => {
+ const userId = new mongoose.Types.ObjectId();
+ const authorId = new mongoose.Types.ObjectId();
+ const agentId = uuidv4();
+ const fileIds = [uuidv4(), uuidv4(), uuidv4(), uuidv4()];
+
+ // Create users
+ await User.create({
+ _id: userId,
+ email: 'user@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ await User.create({
+ _id: authorId,
+ email: 'author@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ // Create files
+ for (const fileId of fileIds) {
+ await createFile({
+ user: authorId,
+ file_id: fileId,
+ filename: `file-${fileId}.txt`,
+ filepath: `/uploads/${fileId}`,
+ });
+ }
+
+ // Create agent with only first two files attached
+ const agent = await createAgent({
+ id: agentId,
+ name: 'Test Agent',
+ author: authorId,
+ model: 'gpt-4',
+ provider: 'openai',
+ tool_resources: {
+ file_search: {
+ file_ids: [fileIds[0], fileIds[1]],
+ },
+ },
+ });
+
+ // Grant EDIT permission to user on the agent
+ await grantPermission({
+ principalType: PrincipalType.USER,
+ principalId: userId,
+ resourceType: ResourceType.AGENT,
+ resourceId: agent._id,
+ accessRoleId: AccessRoleIds.AGENT_EDITOR,
+ grantedBy: authorId,
+ });
+
+ // Check access for all files
+ const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+ const accessMap = await hasAccessToFilesViaAgent({
+ userId: userId,
+ role: SystemRoles.USER,
+ fileIds,
+ agentId: agent.id, // Use agent.id which is the custom UUID
+ });
+
+ // Should have access only to the first two files
+ expect(accessMap.get(fileIds[0])).toBe(true);
+ expect(accessMap.get(fileIds[1])).toBe(true);
+ expect(accessMap.get(fileIds[2])).toBe(false);
+ expect(accessMap.get(fileIds[3])).toBe(false);
+ });
+
+ it('should grant access to all files when user is the agent author', async () => {
+ const authorId = new mongoose.Types.ObjectId();
+ const agentId = uuidv4();
+ const fileIds = [uuidv4(), uuidv4(), uuidv4()];
+
+ // Create author user
+ await User.create({
+ _id: authorId,
+ email: 'author@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ // Create agent
+ await createAgent({
+ id: agentId,
+ name: 'Test Agent',
+ author: authorId,
+ model: 'gpt-4',
+ provider: 'openai',
+ tool_resources: {
+ file_search: {
+ file_ids: [fileIds[0]], // Only one file attached
+ },
+ },
+ });
+
+ // Check access as the author
+ const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+ const accessMap = await hasAccessToFilesViaAgent({
+ userId: authorId,
+ role: SystemRoles.USER,
+ fileIds,
+ agentId,
+ });
+
+ // Author should have access to all files
+ expect(accessMap.get(fileIds[0])).toBe(true);
+ expect(accessMap.get(fileIds[1])).toBe(true);
+ expect(accessMap.get(fileIds[2])).toBe(true);
+ });
+
+ it('should handle non-existent agent gracefully', async () => {
+ const userId = new mongoose.Types.ObjectId();
+ const fileIds = [uuidv4(), uuidv4()];
+
+ // Create user
+ await User.create({
+ _id: userId,
+ email: 'user@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+ const accessMap = await hasAccessToFilesViaAgent({
+ userId: userId,
+ role: SystemRoles.USER,
+ fileIds,
+ agentId: 'non-existent-agent',
+ });
+
+ // Should have no access to any files
+ expect(accessMap.get(fileIds[0])).toBe(false);
+ expect(accessMap.get(fileIds[1])).toBe(false);
+ });
+
+ it('should deny access when user only has VIEW permission and needs access for deletion', async () => {
+ const userId = new mongoose.Types.ObjectId();
+ const authorId = new mongoose.Types.ObjectId();
+ const agentId = uuidv4();
+ const fileIds = [uuidv4(), uuidv4()];
+
+ // Create users
+ await User.create({
+ _id: userId,
+ email: 'user@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ await User.create({
+ _id: authorId,
+ email: 'author@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ // Create agent with files
+ const agent = await createAgent({
+ id: agentId,
+ name: 'View-Only Agent',
+ author: authorId,
+ model: 'gpt-4',
+ provider: 'openai',
+ tool_resources: {
+ file_search: {
+ file_ids: fileIds,
+ },
+ },
+ });
+
+ // Grant only VIEW permission to user on the agent
+ await grantPermission({
+ principalType: PrincipalType.USER,
+ principalId: userId,
+ resourceType: ResourceType.AGENT,
+ resourceId: agent._id,
+ accessRoleId: AccessRoleIds.AGENT_VIEWER,
+ grantedBy: authorId,
+ });
+
+ // Check access for files
+ const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+ const accessMap = await hasAccessToFilesViaAgent({
+ userId: userId,
+ role: SystemRoles.USER,
+ fileIds,
+ agentId,
+ isDelete: true,
+ });
+
+ // Should have no access to any files when only VIEW permission
+ expect(accessMap.get(fileIds[0])).toBe(false);
+ expect(accessMap.get(fileIds[1])).toBe(false);
+ });
+
+ it('should grant access when user has VIEW permission', async () => {
+ const userId = new mongoose.Types.ObjectId();
+ const authorId = new mongoose.Types.ObjectId();
+ const agentId = uuidv4();
+ const fileIds = [uuidv4(), uuidv4()];
+
+ // Create users
+ await User.create({
+ _id: userId,
+ email: 'user@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ await User.create({
+ _id: authorId,
+ email: 'author@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ // Create agent with files
+ const agent = await createAgent({
+ id: agentId,
+ name: 'View-Only Agent',
+ author: authorId,
+ model: 'gpt-4',
+ provider: 'openai',
+ tool_resources: {
+ file_search: {
+ file_ids: fileIds,
+ },
+ },
+ });
+
+ // Grant only VIEW permission to user on the agent
+ await grantPermission({
+ principalType: PrincipalType.USER,
+ principalId: userId,
+ resourceType: ResourceType.AGENT,
+ resourceId: agent._id,
+ accessRoleId: AccessRoleIds.AGENT_VIEWER,
+ grantedBy: authorId,
+ });
+
+ // Check access for files
+ const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+ const accessMap = await hasAccessToFilesViaAgent({
+ userId: userId,
+ role: SystemRoles.USER,
+ fileIds,
+ agentId,
+ });
+
+ expect(accessMap.get(fileIds[0])).toBe(true);
+ expect(accessMap.get(fileIds[1])).toBe(true);
+ });
+ });
+
+ describe('getFiles with agent access control', () => {
+ test('should return files owned by user and files accessible through agent', async () => {
+ const authorId = new mongoose.Types.ObjectId();
+ const userId = new mongoose.Types.ObjectId();
+ const agentId = `agent_${uuidv4()}`;
+ const ownedFileId = `file_${uuidv4()}`;
+ const sharedFileId = `file_${uuidv4()}`;
+ const inaccessibleFileId = `file_${uuidv4()}`;
+
+ // Create users
+ await User.create({
+ _id: userId,
+ email: 'user@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ await User.create({
+ _id: authorId,
+ email: 'author@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ // Create agent with shared file
+ const agent = await createAgent({
+ id: agentId,
+ name: 'Shared Agent',
+ provider: 'test',
+ model: 'test-model',
+ author: authorId,
+ tool_resources: {
+ file_search: {
+ file_ids: [sharedFileId],
+ },
+ },
+ });
+
+ // Grant EDIT permission to user on the agent
+ await grantPermission({
+ principalType: PrincipalType.USER,
+ principalId: userId,
+ resourceType: ResourceType.AGENT,
+ resourceId: agent._id,
+ accessRoleId: AccessRoleIds.AGENT_EDITOR,
+ grantedBy: authorId,
+ });
+
+ // Create files
+ await createFile({
+ file_id: ownedFileId,
+ user: userId,
+ filename: 'owned.txt',
+ filepath: '/uploads/owned.txt',
+ type: 'text/plain',
+ bytes: 100,
+ });
+
+ await createFile({
+ file_id: sharedFileId,
+ user: authorId,
+ filename: 'shared.txt',
+ filepath: '/uploads/shared.txt',
+ type: 'text/plain',
+ bytes: 200,
+ embedded: true,
+ });
+
+ await createFile({
+ file_id: inaccessibleFileId,
+ user: authorId,
+ filename: 'inaccessible.txt',
+ filepath: '/uploads/inaccessible.txt',
+ type: 'text/plain',
+ bytes: 300,
+ });
+
+ // Get all files first
+ const allFiles = await getFiles(
+ { file_id: { $in: [ownedFileId, sharedFileId, inaccessibleFileId] } },
+ null,
+ { text: 0 },
+ );
+
+ // Then filter by access control
+ const { filterFilesByAgentAccess } = require('~/server/services/Files/permissions');
+ const files = await filterFilesByAgentAccess({
+ files: allFiles,
+ userId: userId,
+ role: SystemRoles.USER,
+ agentId,
+ });
+
+ expect(files).toHaveLength(2);
+ expect(files.map((f) => f.file_id)).toContain(ownedFileId);
+ expect(files.map((f) => f.file_id)).toContain(sharedFileId);
+ expect(files.map((f) => f.file_id)).not.toContain(inaccessibleFileId);
+ });
+
+ test('should return all files when no userId/agentId provided', async () => {
+ const userId = new mongoose.Types.ObjectId();
+ const fileId1 = `file_${uuidv4()}`;
+ const fileId2 = `file_${uuidv4()}`;
+
+ await createFile({
+ file_id: fileId1,
+ user: userId,
+ filename: 'file1.txt',
+ filepath: '/uploads/file1.txt',
+ type: 'text/plain',
+ bytes: 100,
+ });
+
+ await createFile({
+ file_id: fileId2,
+ user: new mongoose.Types.ObjectId(),
+ filename: 'file2.txt',
+ filepath: '/uploads/file2.txt',
+ type: 'text/plain',
+ bytes: 200,
+ });
+
+ const files = await getFiles({ file_id: { $in: [fileId1, fileId2] } });
+ expect(files).toHaveLength(2);
+ });
+ });
+
+ describe('Role-based file permissions', () => {
+ it('should optimize permission checks when role is provided', async () => {
+ const userId = new mongoose.Types.ObjectId();
+ const authorId = new mongoose.Types.ObjectId();
+ const agentId = uuidv4();
+ const fileIds = [uuidv4(), uuidv4()];
+
+ // Create users
+ await User.create({
+ _id: userId,
+ email: 'user@example.com',
+ emailVerified: true,
+ provider: 'local',
+ role: 'ADMIN', // User has ADMIN role
+ });
+
+ await User.create({
+ _id: authorId,
+ email: 'author@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ // Create files
+ for (const fileId of fileIds) {
+ await createFile({
+ file_id: fileId,
+ user: authorId,
+ filename: `${fileId}.txt`,
+ filepath: `/uploads/${fileId}.txt`,
+ type: 'text/plain',
+ bytes: 100,
+ });
+ }
+
+ // Create agent with files
+ const agent = await createAgent({
+ id: agentId,
+ name: 'Test Agent',
+ author: authorId,
+ model: 'gpt-4',
+ provider: 'openai',
+ tool_resources: {
+ file_search: {
+ file_ids: fileIds,
+ },
+ },
+ });
+
+ // Grant permission to ADMIN role
+ await grantPermission({
+ principalType: PrincipalType.ROLE,
+ principalId: 'ADMIN',
+ resourceType: ResourceType.AGENT,
+ resourceId: agent._id,
+ accessRoleId: AccessRoleIds.AGENT_EDITOR,
+ grantedBy: authorId,
+ });
+
+ // Check access with role provided (should avoid DB query)
+ const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+ const accessMapWithRole = await hasAccessToFilesViaAgent({
+ userId: userId,
+ role: 'ADMIN',
+ fileIds,
+ agentId: agent.id,
+ });
+
+ // User should have access through their ADMIN role
+ expect(accessMapWithRole.get(fileIds[0])).toBe(true);
+ expect(accessMapWithRole.get(fileIds[1])).toBe(true);
+
+ // Check access without role (will query DB to get user's role)
+ const accessMapWithoutRole = await hasAccessToFilesViaAgent({
+ userId: userId,
+ fileIds,
+ agentId: agent.id,
+ });
+
+ // Should have same result
+ expect(accessMapWithoutRole.get(fileIds[0])).toBe(true);
+ expect(accessMapWithoutRole.get(fileIds[1])).toBe(true);
+ });
+
+ it('should deny access when user role changes', async () => {
+ const userId = new mongoose.Types.ObjectId();
+ const authorId = new mongoose.Types.ObjectId();
+ const agentId = uuidv4();
+ const fileId = uuidv4();
+
+ // Create users
+ await User.create({
+ _id: userId,
+ email: 'user@example.com',
+ emailVerified: true,
+ provider: 'local',
+ role: 'EDITOR',
+ });
+
+ await User.create({
+ _id: authorId,
+ email: 'author@example.com',
+ emailVerified: true,
+ provider: 'local',
+ });
+
+ // Create file
+ await createFile({
+ file_id: fileId,
+ user: authorId,
+ filename: 'test.txt',
+ filepath: '/uploads/test.txt',
+ type: 'text/plain',
+ bytes: 100,
+ });
+
+ // Create agent
+ const agent = await createAgent({
+ id: agentId,
+ name: 'Test Agent',
+ author: authorId,
+ model: 'gpt-4',
+ provider: 'openai',
+ tool_resources: {
+ file_search: {
+ file_ids: [fileId],
+ },
+ },
+ });
+
+ // Grant permission to EDITOR role only
+ await grantPermission({
+ principalType: PrincipalType.ROLE,
+ principalId: 'EDITOR',
+ resourceType: ResourceType.AGENT,
+ resourceId: agent._id,
+ accessRoleId: AccessRoleIds.AGENT_EDITOR,
+ grantedBy: authorId,
+ });
+
+ const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
+
+ // Check with EDITOR role - should have access
+ const accessAsEditor = await hasAccessToFilesViaAgent({
+ userId: userId,
+ role: 'EDITOR',
+ fileIds: [fileId],
+ agentId: agent.id,
+ });
+ expect(accessAsEditor.get(fileId)).toBe(true);
+
+ // Simulate role change to USER - should lose access
+ const accessAsUser = await hasAccessToFilesViaAgent({
+ userId: userId,
+ role: SystemRoles.USER,
+ fileIds: [fileId],
+ agentId: agent.id,
+ });
+ expect(accessAsUser.get(fileId)).toBe(false);
+ });
+ });
+});
diff --git a/api/models/Message.js b/api/models/Message.js
new file mode 100644
index 0000000000..8fe04f6f54
--- /dev/null
+++ b/api/models/Message.js
@@ -0,0 +1,372 @@
+const { z } = require('zod');
+const { logger } = require('@librechat/data-schemas');
+const { createTempChatExpirationDate } = require('@librechat/api');
+const { Message } = require('~/db/models');
+
+const idSchema = z.string().uuid();
+
+/**
+ * Saves a message in the database.
+ *
+ * @async
+ * @function saveMessage
+ * @param {ServerRequest} req - The request object containing user information.
+ * @param {Object} params - The message data object.
+ * @param {string} params.endpoint - The endpoint where the message originated.
+ * @param {string} params.iconURL - The URL of the sender's icon.
+ * @param {string} params.messageId - The unique identifier for the message.
+ * @param {string} params.newMessageId - The new unique identifier for the message (if applicable).
+ * @param {string} params.conversationId - The identifier of the conversation.
+ * @param {string} [params.parentMessageId] - The identifier of the parent message, if any.
+ * @param {string} params.sender - The identifier of the sender.
+ * @param {string} params.text - The text content of the message.
+ * @param {boolean} params.isCreatedByUser - Indicates if the message was created by the user.
+ * @param {string} [params.error] - Any error associated with the message.
+ * @param {boolean} [params.unfinished] - Indicates if the message is unfinished.
+ * @param {Object[]} [params.files] - An array of files associated with the message.
+ * @param {string} [params.finish_reason] - Reason for finishing the message.
+ * @param {number} [params.tokenCount] - The number of tokens in the message.
+ * @param {string} [params.plugin] - Plugin associated with the message.
+ * @param {string[]} [params.plugins] - An array of plugins associated with the message.
+ * @param {string} [params.model] - The model used to generate the message.
+ * @param {Object} [metadata] - Additional metadata for this operation
+ * @param {string} [metadata.context] - The context of the operation
+ * @returns {Promise<TMessage>} The updated or newly inserted message document.
+ * @throws {Error} If there is an error in saving the message.
+ */
+async function saveMessage(req, params, metadata) {
+ if (!req?.user?.id) {
+ throw new Error('User not authenticated');
+ }
+
+ const validConvoId = idSchema.safeParse(params.conversationId);
+ if (!validConvoId.success) {
+ logger.warn(`Invalid conversation ID: ${params.conversationId}`);
+ logger.info(`---\`saveMessage\` context: ${metadata?.context}`);
+ logger.info(`---Invalid conversation ID Params: ${JSON.stringify(params, null, 2)}`);
+ return;
+ }
+
+ try {
+ const update = {
+ ...params,
+ user: req.user.id,
+ messageId: params.newMessageId || params.messageId,
+ };
+
+ if (req?.body?.isTemporary) {
+ try {
+ const appConfig = req.config;
+ update.expiredAt = createTempChatExpirationDate(appConfig?.interfaceConfig);
+ } catch (err) {
+ logger.error('Error creating temporary chat expiration date:', err);
+ logger.info(`---\`saveMessage\` context: ${metadata?.context}`);
+ update.expiredAt = null;
+ }
+ } else {
+ update.expiredAt = null;
+ }
+
+ if (update.tokenCount != null && isNaN(update.tokenCount)) {
+ logger.warn(
+ `Resetting invalid \`tokenCount\` for message \`${params.messageId}\`: ${update.tokenCount}`,
+ );
+ logger.info(`---\`saveMessage\` context: ${metadata?.context}`);
+ update.tokenCount = 0;
+ }
+ const message = await Message.findOneAndUpdate(
+ { messageId: params.messageId, user: req.user.id },
+ update,
+ { upsert: true, new: true },
+ );
+
+ return message.toObject();
+ } catch (err) {
+ logger.error('Error saving message:', err);
+ logger.info(`---\`saveMessage\` context: ${metadata?.context}`);
+
+ // Check if this is a duplicate key error (MongoDB error code 11000)
+ if (err.code === 11000 && err.message.includes('duplicate key error')) {
+ // Log the duplicate key error but don't crash the application
+ logger.warn(`Duplicate messageId detected: ${params.messageId}. Continuing execution.`);
+
+ try {
+ // Try to find the existing message with this ID
+ const existingMessage = await Message.findOne({
+ messageId: params.messageId,
+ user: req.user.id,
+ });
+
+ // If we found it, return it
+ if (existingMessage) {
+ return existingMessage.toObject();
+ }
+
+ // If we can't find it (unlikely but possible in race conditions)
+ return {
+ ...params,
+ messageId: params.messageId,
+ user: req.user.id,
+ };
+ } catch (findError) {
+ // If the findOne also fails, log it but don't crash
+ logger.warn(
+ `Could not retrieve existing message with ID ${params.messageId}: ${findError.message}`,
+ );
+ return {
+ ...params,
+ messageId: params.messageId,
+ user: req.user.id,
+ };
+ }
+ }
+
+ throw err; // Re-throw other errors
+ }
+}
+
+/**
+ * Saves multiple messages in the database in bulk.
+ *
+ * @async
+ * @function bulkSaveMessages
+ * @param {Object[]} messages - An array of message objects to save.
+ * @param {boolean} [overrideTimestamp=false] - Indicates whether to override the timestamps of the messages. Defaults to false.
+ * @returns {Promise<Object>} The result of the bulk write operation.
+ * @throws {Error} If there is an error in saving messages in bulk.
+ */
+async function bulkSaveMessages(messages, overrideTimestamp = false) {
+ try {
+ const bulkOps = messages.map((message) => ({
+ updateOne: {
+ filter: { messageId: message.messageId },
+ update: message,
+ timestamps: !overrideTimestamp,
+ upsert: true,
+ },
+ }));
+ const result = await Message.bulkWrite(bulkOps);
+ return result;
+ } catch (err) {
+ logger.error('Error saving messages in bulk:', err);
+ throw err;
+ }
+}
+
+/**
+ * Records a message in the database.
+ *
+ * @async
+ * @function recordMessage
+ * @param {Object} params - The message data object.
+ * @param {string} params.user - The identifier of the user.
+ * @param {string} params.endpoint - The endpoint where the message originated.
+ * @param {string} params.messageId - The unique identifier for the message.
+ * @param {string} params.conversationId - The identifier of the conversation.
+ * @param {string} [params.parentMessageId] - The identifier of the parent message, if any.
+ * @param {Partial<TMessage>} rest - Any additional properties from the TMessage typedef not explicitly listed.
+ * @returns {Promise<TMessage>} The updated or newly inserted message document.
+ * @throws {Error} If there is an error in saving the message.
+ */
+async function recordMessage({
+ user,
+ endpoint,
+ messageId,
+ conversationId,
+ parentMessageId,
+ ...rest
+}) {
+ try {
+ // No parsing of convoId as may use threadId
+ const message = {
+ user,
+ endpoint,
+ messageId,
+ conversationId,
+ parentMessageId,
+ ...rest,
+ };
+
+ return await Message.findOneAndUpdate({ user, messageId }, message, {
+ upsert: true,
+ new: true,
+ });
+ } catch (err) {
+ logger.error('Error recording message:', err);
+ throw err;
+ }
+}
+
+/**
+ * Updates the text of a message.
+ *
+ * @async
+ * @function updateMessageText
+ * @param {Object} params - The update data object.
+ * @param {Object} req - The request object.
+ * @param {string} params.messageId - The unique identifier for the message.
+ * @param {string} params.text - The new text content of the message.
+ * @returns {Promise<void>}
+ * @throws {Error} If there is an error in updating the message text.
+ */
+async function updateMessageText(req, { messageId, text }) {
+ try {
+ await Message.updateOne({ messageId, user: req.user.id }, { text });
+ } catch (err) {
+ logger.error('Error updating message text:', err);
+ throw err;
+ }
+}
+
+/**
+ * Updates a message.
+ *
+ * @async
+ * @function updateMessage
+ * @param {Object} req - The request object.
+ * @param {Object} message - The message object containing update data.
+ * @param {string} message.messageId - The unique identifier for the message.
+ * @param {string} [message.text] - The new text content of the message.
+ * @param {Object[]} [message.files] - The files associated with the message.
+ * @param {boolean} [message.isCreatedByUser] - Indicates if the message was created by the user.
+ * @param {string} [message.sender] - The identifier of the sender.
+ * @param {number} [message.tokenCount] - The number of tokens in the message.
+ * @param {Object} [metadata] - The operation metadata
+ * @param {string} [metadata.context] - The operation metadata
+ * @returns {Promise<TMessage>} The updated message document.
+ * @throws {Error} If there is an error in updating the message or if the message is not found.
+ */
+async function updateMessage(req, message, metadata) {
+ try {
+ const { messageId, ...update } = message;
+ const updatedMessage = await Message.findOneAndUpdate(
+ { messageId, user: req.user.id },
+ update,
+ {
+ new: true,
+ },
+ );
+
+ if (!updatedMessage) {
+ throw new Error('Message not found or user not authorized.');
+ }
+
+ return {
+ messageId: updatedMessage.messageId,
+ conversationId: updatedMessage.conversationId,
+ parentMessageId: updatedMessage.parentMessageId,
+ sender: updatedMessage.sender,
+ text: updatedMessage.text,
+ isCreatedByUser: updatedMessage.isCreatedByUser,
+ tokenCount: updatedMessage.tokenCount,
+ feedback: updatedMessage.feedback,
+ };
+ } catch (err) {
+ logger.error('Error updating message:', err);
+ if (metadata && metadata?.context) {
+ logger.info(`---\`updateMessage\` context: ${metadata.context}`);
+ }
+ throw err;
+ }
+}
+
+/**
+ * Deletes messages in a conversation since a specific message.
+ *
+ * @async
+ * @function deleteMessagesSince
+ * @param {Object} params - The parameters object.
+ * @param {Object} req - The request object.
+ * @param {string} params.messageId - The unique identifier for the message.
+ * @param {string} params.conversationId - The identifier of the conversation.
+ * @returns {Promise<Object | undefined>} The deletion result with the count of deleted messages, or undefined if the target message was not found.
+ * @throws {Error} If there is an error in deleting messages.
+ */
+async function deleteMessagesSince(req, { messageId, conversationId }) {
+ try {
+ const message = await Message.findOne({ messageId, user: req.user.id }).lean();
+
+ if (message) {
+ const query = Message.find({ conversationId, user: req.user.id });
+ return await query.deleteMany({
+ createdAt: { $gt: message.createdAt },
+ });
+ }
+ return undefined;
+ } catch (err) {
+ logger.error('Error deleting messages:', err);
+ throw err;
+ }
+}
+
+/**
+ * Retrieves messages from the database.
+ * @async
+ * @function getMessages
+ * @param {Record<string, unknown>} filter - The filter criteria.
+ * @param {string | undefined} [select] - The fields to select.
+ * @returns {Promise<TMessage[]>} The messages that match the filter criteria.
+ * @throws {Error} If there is an error in retrieving messages.
+ */
+async function getMessages(filter, select) {
+ try {
+ if (select) {
+ return await Message.find(filter).select(select).sort({ createdAt: 1 }).lean();
+ }
+
+ return await Message.find(filter).sort({ createdAt: 1 }).lean();
+ } catch (err) {
+ logger.error('Error getting messages:', err);
+ throw err;
+ }
+}
+
+/**
+ * Retrieves a single message from the database.
+ * @async
+ * @function getMessage
+ * @param {{ user: string, messageId: string }} params - The search parameters
+ * @returns {Promise<TMessage | null>} The message that matches the criteria or null if not found
+ * @throws {Error} If there is an error in retrieving the message
+ */
+async function getMessage({ user, messageId }) {
+ try {
+ return await Message.findOne({
+ user,
+ messageId,
+ }).lean();
+ } catch (err) {
+ logger.error('Error getting message:', err);
+ throw err;
+ }
+}
+
+/**
+ * Deletes messages from the database.
+ *
+ * @async
+ * @function deleteMessages
+ * @param {import('mongoose').FilterQuery} filter - The filter criteria to find messages to delete.
+ * @returns {Promise<Object>} The metadata with count of deleted messages.
+ * @throws {Error} If there is an error in deleting messages.
+ */
+async function deleteMessages(filter) {
+ try {
+ return await Message.deleteMany(filter);
+ } catch (err) {
+ logger.error('Error deleting messages:', err);
+ throw err;
+ }
+}
+
+module.exports = {
+ saveMessage,
+ bulkSaveMessages,
+ recordMessage,
+ updateMessageText,
+ updateMessage,
+ deleteMessagesSince,
+ getMessages,
+ getMessage,
+ deleteMessages,
+};
diff --git a/packages/data-schemas/src/methods/message.spec.ts b/api/models/Message.spec.js
similarity index 61%
rename from packages/data-schemas/src/methods/message.spec.ts
rename to api/models/Message.spec.js
index dfa34c0eec..39b5b4337c 100644
--- a/packages/data-schemas/src/methods/message.spec.ts
+++ b/api/models/Message.spec.js
@@ -1,76 +1,52 @@
-import mongoose from 'mongoose';
-import { v4 as uuidv4 } from 'uuid';
-import { MongoMemoryServer } from 'mongodb-memory-server';
-import type { IMessage } from '..';
-import { createMessageMethods } from './message';
-import { tenantStorage, runAsSystem } from '~/config/tenantContext';
-import { createModels } from '../models';
+const mongoose = require('mongoose');
+const { v4: uuidv4 } = require('uuid');
+const { messageSchema } = require('@librechat/data-schemas');
+const { MongoMemoryServer } = require('mongodb-memory-server');
-jest.mock('~/config/winston', () => ({
- error: jest.fn(),
- warn: jest.fn(),
- info: jest.fn(),
- debug: jest.fn(),
-}));
+const {
+ saveMessage,
+ getMessages,
+ updateMessage,
+ deleteMessages,
+ bulkSaveMessages,
+ updateMessageText,
+ deleteMessagesSince,
+} = require('./Message');
-let mongoServer: InstanceType;
-let Message: mongoose.Model;
-let saveMessage: ReturnType['saveMessage'];
-let getMessages: ReturnType['getMessages'];
-let updateMessage: ReturnType['updateMessage'];
-let deleteMessages: ReturnType['deleteMessages'];
-let bulkSaveMessages: ReturnType['bulkSaveMessages'];
-let updateMessageText: ReturnType['updateMessageText'];
-let deleteMessagesSince: ReturnType['deleteMessagesSince'];
-let recordMessage: ReturnType['recordMessage'];
+jest.mock('~/server/services/Config/app');
-beforeAll(async () => {
- mongoServer = await MongoMemoryServer.create();
- const mongoUri = mongoServer.getUri();
-
- const models = createModels(mongoose);
- Object.assign(mongoose.models, models);
- Message = mongoose.models.Message;
-
- const methods = createMessageMethods(mongoose);
- saveMessage = methods.saveMessage;
- getMessages = methods.getMessages;
- updateMessage = methods.updateMessage;
- deleteMessages = methods.deleteMessages;
- bulkSaveMessages = methods.bulkSaveMessages;
- updateMessageText = methods.updateMessageText;
- deleteMessagesSince = methods.deleteMessagesSince;
- recordMessage = methods.recordMessage;
-
- await mongoose.connect(mongoUri);
-});
-
-afterAll(async () => {
- await mongoose.disconnect();
- await mongoServer.stop();
-});
+/**
+ * @type {import('mongoose').Model}
+ */
+let Message;
describe('Message Operations', () => {
- let mockCtx: {
- userId: string;
- isTemporary?: boolean;
- interfaceConfig?: { temporaryChatRetention?: number };
- };
- let mockMessageData: Partial = {
- messageId: 'msg123',
- conversationId: uuidv4(),
- text: 'Hello, world!',
- user: 'user123',
- };
+ let mongoServer;
+ let mockReq;
+ let mockMessageData;
+
+ beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ Message = mongoose.models.Message || mongoose.model('Message', messageSchema);
+ await mongoose.connect(mongoUri);
+ });
+
+ afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ });
beforeEach(async () => {
// Clear database
await Message.deleteMany({});
- mockCtx = {
- userId: 'user123',
- interfaceConfig: {
- temporaryChatRetention: 24, // Default 24 hours
+ mockReq = {
+ user: { id: 'user123' },
+ config: {
+ interfaceConfig: {
+ temporaryChatRetention: 24, // Default 24 hours
+ },
},
};
@@ -84,26 +60,26 @@ describe('Message Operations', () => {
describe('saveMessage', () => {
it('should save a message for an authenticated user', async () => {
- const result = await saveMessage(mockCtx, mockMessageData);
+ const result = await saveMessage(mockReq, mockMessageData);
- expect(result?.messageId).toBe('msg123');
- expect(result?.user).toBe('user123');
- expect(result?.text).toBe('Hello, world!');
+ expect(result.messageId).toBe('msg123');
+ expect(result.user).toBe('user123');
+ expect(result.text).toBe('Hello, world!');
// Verify the message was actually saved to the database
const savedMessage = await Message.findOne({ messageId: 'msg123', user: 'user123' });
expect(savedMessage).toBeTruthy();
- expect(savedMessage?.text).toBe('Hello, world!');
+ expect(savedMessage.text).toBe('Hello, world!');
});
it('should throw an error for unauthenticated user', async () => {
- mockCtx.userId = null as unknown as string;
- await expect(saveMessage(mockCtx, mockMessageData)).rejects.toThrow('User not authenticated');
+ mockReq.user = null;
+ await expect(saveMessage(mockReq, mockMessageData)).rejects.toThrow('User not authenticated');
});
it('should handle invalid conversation ID gracefully', async () => {
mockMessageData.conversationId = 'invalid-id';
- const result = await saveMessage(mockCtx, mockMessageData);
+ const result = await saveMessage(mockReq, mockMessageData);
expect(result).toBeUndefined();
});
});
@@ -111,38 +87,35 @@ describe('Message Operations', () => {
describe('updateMessageText', () => {
it('should update message text for the authenticated user', async () => {
// First save a message
- await saveMessage(mockCtx, mockMessageData);
+ await saveMessage(mockReq, mockMessageData);
// Then update it
- await updateMessageText(mockCtx.userId, { messageId: 'msg123', text: 'Updated text' });
+ await updateMessageText(mockReq, { messageId: 'msg123', text: 'Updated text' });
// Verify the update
const updatedMessage = await Message.findOne({ messageId: 'msg123', user: 'user123' });
- expect(updatedMessage?.text).toBe('Updated text');
+ expect(updatedMessage.text).toBe('Updated text');
});
});
describe('updateMessage', () => {
it('should update a message for the authenticated user', async () => {
// First save a message
- await saveMessage(mockCtx, mockMessageData);
+ await saveMessage(mockReq, mockMessageData);
- const result = await updateMessage(mockCtx.userId, {
- messageId: 'msg123',
- text: 'Updated text',
- });
+ const result = await updateMessage(mockReq, { messageId: 'msg123', text: 'Updated text' });
- expect(result?.messageId).toBe('msg123');
- expect(result?.text).toBe('Updated text');
+ expect(result.messageId).toBe('msg123');
+ expect(result.text).toBe('Updated text');
// Verify in database
const updatedMessage = await Message.findOne({ messageId: 'msg123', user: 'user123' });
- expect(updatedMessage?.text).toBe('Updated text');
+ expect(updatedMessage.text).toBe('Updated text');
});
it('should throw an error if message is not found', async () => {
await expect(
- updateMessage(mockCtx.userId, { messageId: 'nonexistent', text: 'Test' }),
+ updateMessage(mockReq, { messageId: 'nonexistent', text: 'Test' }),
).rejects.toThrow('Message not found or user not authorized.');
});
});
@@ -152,21 +125,21 @@ describe('Message Operations', () => {
const conversationId = uuidv4();
// Create multiple messages in the same conversation
- await saveMessage(mockCtx, {
+ await saveMessage(mockReq, {
messageId: 'msg1',
conversationId,
text: 'First message',
user: 'user123',
});
- await saveMessage(mockCtx, {
+ await saveMessage(mockReq, {
messageId: 'msg2',
conversationId,
text: 'Second message',
user: 'user123',
});
- await saveMessage(mockCtx, {
+ await saveMessage(mockReq, {
messageId: 'msg3',
conversationId,
text: 'Third message',
@@ -174,7 +147,7 @@ describe('Message Operations', () => {
});
// Delete messages since message2 (this should only delete messages created AFTER msg2)
- await deleteMessagesSince(mockCtx.userId, {
+ await deleteMessagesSince(mockReq, {
messageId: 'msg2',
conversationId,
});
@@ -188,7 +161,7 @@ describe('Message Operations', () => {
});
it('should return undefined if no message is found', async () => {
- const result = await deleteMessagesSince(mockCtx.userId, {
+ const result = await deleteMessagesSince(mockReq, {
messageId: 'nonexistent',
conversationId: 'convo123',
});
@@ -201,14 +174,14 @@ describe('Message Operations', () => {
const conversationId = uuidv4();
// Save some messages
- await saveMessage(mockCtx, {
+ await saveMessage(mockReq, {
messageId: 'msg1',
conversationId,
text: 'First message',
user: 'user123',
});
- await saveMessage(mockCtx, {
+ await saveMessage(mockReq, {
messageId: 'msg2',
conversationId,
text: 'Second message',
@@ -225,9 +198,9 @@ describe('Message Operations', () => {
describe('deleteMessages', () => {
it('should delete messages with the correct filter', async () => {
// Save some messages for different users
- await saveMessage(mockCtx, mockMessageData);
+ await saveMessage(mockReq, mockMessageData);
await saveMessage(
- { userId: 'user456' },
+ { user: { id: 'user456' } },
{
messageId: 'msg456',
conversationId: uuidv4(),
@@ -249,23 +222,22 @@ describe('Message Operations', () => {
describe('Conversation Hijacking Prevention', () => {
it("should not allow editing a message in another user's conversation", async () => {
+ const attackerReq = { user: { id: 'attacker123' } };
const victimConversationId = uuidv4();
const victimMessageId = 'victim-msg-123';
// First, save a message as the victim (but we'll try to edit as attacker)
- await saveMessage(
- { userId: 'victim123' },
- {
- messageId: victimMessageId,
- conversationId: victimConversationId,
- text: 'Victim message',
- user: 'victim123',
- },
- );
+ const victimReq = { user: { id: 'victim123' } };
+ await saveMessage(victimReq, {
+ messageId: victimMessageId,
+ conversationId: victimConversationId,
+ text: 'Victim message',
+ user: 'victim123',
+ });
// Attacker tries to edit the victim's message
await expect(
- updateMessage('attacker123', {
+ updateMessage(attackerReq, {
messageId: victimMessageId,
conversationId: victimConversationId,
text: 'Hacked message',
@@ -277,26 +249,25 @@ describe('Message Operations', () => {
messageId: victimMessageId,
user: 'victim123',
});
- expect(originalMessage?.text).toBe('Victim message');
+ expect(originalMessage.text).toBe('Victim message');
});
it("should not allow deleting messages from another user's conversation", async () => {
+ const attackerReq = { user: { id: 'attacker123' } };
const victimConversationId = uuidv4();
const victimMessageId = 'victim-msg-123';
// Save a message as the victim
- await saveMessage(
- { userId: 'victim123' },
- {
- messageId: victimMessageId,
- conversationId: victimConversationId,
- text: 'Victim message',
- user: 'victim123',
- },
- );
+ const victimReq = { user: { id: 'victim123' } };
+ await saveMessage(victimReq, {
+ messageId: victimMessageId,
+ conversationId: victimConversationId,
+ text: 'Victim message',
+ user: 'victim123',
+ });
// Attacker tries to delete from victim's conversation
- const result = await deleteMessagesSince('attacker123', {
+ const result = await deleteMessagesSince(attackerReq, {
messageId: victimMessageId,
conversationId: victimConversationId,
});
@@ -309,45 +280,41 @@ describe('Message Operations', () => {
user: 'victim123',
});
expect(victimMessage).toBeTruthy();
- expect(victimMessage?.text).toBe('Victim message');
+ expect(victimMessage.text).toBe('Victim message');
});
it("should not allow inserting a new message into another user's conversation", async () => {
+ const attackerReq = { user: { id: 'attacker123' } };
const victimConversationId = uuidv4();
// Attacker tries to save a message - this should succeed but with attacker's user ID
- const result = await saveMessage(
- { userId: 'attacker123' },
- {
- conversationId: victimConversationId,
- text: 'Inserted malicious message',
- messageId: 'new-msg-123',
- user: 'attacker123',
- },
- );
+ const result = await saveMessage(attackerReq, {
+ conversationId: victimConversationId,
+ text: 'Inserted malicious message',
+ messageId: 'new-msg-123',
+ user: 'attacker123',
+ });
expect(result).toBeTruthy();
- expect(result?.user).toBe('attacker123');
+ expect(result.user).toBe('attacker123');
// Verify the message was saved with the attacker's user ID, not as an anonymous message
const savedMessage = await Message.findOne({ messageId: 'new-msg-123' });
- expect(savedMessage?.user).toBe('attacker123');
- expect(savedMessage?.conversationId).toBe(victimConversationId);
+ expect(savedMessage.user).toBe('attacker123');
+ expect(savedMessage.conversationId).toBe(victimConversationId);
});
it('should allow retrieving messages from any conversation', async () => {
const victimConversationId = uuidv4();
// Save a message in the victim's conversation
- await saveMessage(
- { userId: 'victim123' },
- {
- messageId: 'victim-msg',
- conversationId: victimConversationId,
- text: 'Victim message',
- user: 'victim123',
- },
- );
+ const victimReq = { user: { id: 'victim123' } };
+ await saveMessage(victimReq, {
+ messageId: 'victim-msg',
+ conversationId: victimConversationId,
+ text: 'Victim message',
+ user: 'victim123',
+ });
// Anyone should be able to retrieve messages by conversation ID
const messages = await getMessages({ conversationId: victimConversationId });
@@ -364,21 +331,21 @@ describe('Message Operations', () => {
it('should save a message with expiredAt when isTemporary is true', async () => {
// Mock app config with 24 hour retention
- mockCtx.interfaceConfig = { temporaryChatRetention: 24 };
+ mockReq.config.interfaceConfig.temporaryChatRetention = 24;
- mockCtx.isTemporary = true;
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveMessage(mockCtx, mockMessageData);
+ const result = await saveMessage(mockReq, mockMessageData);
const afterSave = new Date();
- expect(result?.messageId).toBe('msg123');
- expect(result?.expiredAt).toBeDefined();
- expect(result?.expiredAt).toBeInstanceOf(Date);
+ expect(result.messageId).toBe('msg123');
+ expect(result.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeInstanceOf(Date);
// Verify expiredAt is approximately 24 hours in the future
const expectedExpirationTime = new Date(beforeSave.getTime() + 24 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -389,37 +356,38 @@ describe('Message Operations', () => {
});
it('should save a message without expiredAt when isTemporary is false', async () => {
- mockCtx.isTemporary = false;
+ mockReq.body = { isTemporary: false };
- const result = await saveMessage(mockCtx, mockMessageData);
+ const result = await saveMessage(mockReq, mockMessageData);
- expect(result?.messageId).toBe('msg123');
- expect(result?.expiredAt).toBeNull();
+ expect(result.messageId).toBe('msg123');
+ expect(result.expiredAt).toBeNull();
});
it('should save a message without expiredAt when isTemporary is not provided', async () => {
- // No isTemporary set
+ // No isTemporary in body
+ mockReq.body = {};
- const result = await saveMessage(mockCtx, mockMessageData);
+ const result = await saveMessage(mockReq, mockMessageData);
- expect(result?.messageId).toBe('msg123');
- expect(result?.expiredAt).toBeNull();
+ expect(result.messageId).toBe('msg123');
+ expect(result.expiredAt).toBeNull();
});
it('should use custom retention period from config', async () => {
// Mock app config with 48 hour retention
- mockCtx.interfaceConfig = { temporaryChatRetention: 48 };
+ mockReq.config.interfaceConfig.temporaryChatRetention = 48;
- mockCtx.isTemporary = true;
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveMessage(mockCtx, mockMessageData);
+ const result = await saveMessage(mockReq, mockMessageData);
- expect(result?.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeDefined();
// Verify expiredAt is approximately 48 hours in the future
const expectedExpirationTime = new Date(beforeSave.getTime() + 48 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -431,18 +399,18 @@ describe('Message Operations', () => {
it('should handle minimum retention period (1 hour)', async () => {
// Mock app config with less than minimum retention
- mockCtx.interfaceConfig = { temporaryChatRetention: 0.5 }; // Half hour - should be clamped to 1 hour
+ mockReq.config.interfaceConfig.temporaryChatRetention = 0.5; // Half hour - should be clamped to 1 hour
- mockCtx.isTemporary = true;
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveMessage(mockCtx, mockMessageData);
+ const result = await saveMessage(mockReq, mockMessageData);
- expect(result?.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeDefined();
// Verify expiredAt is approximately 1 hour in the future (minimum)
const expectedExpirationTime = new Date(beforeSave.getTime() + 1 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -454,18 +422,18 @@ describe('Message Operations', () => {
it('should handle maximum retention period (8760 hours)', async () => {
// Mock app config with more than maximum retention
- mockCtx.interfaceConfig = { temporaryChatRetention: 10000 }; // Should be clamped to 8760 hours
+ mockReq.config.interfaceConfig.temporaryChatRetention = 10000; // Should be clamped to 8760 hours
- mockCtx.isTemporary = true;
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveMessage(mockCtx, mockMessageData);
+ const result = await saveMessage(mockReq, mockMessageData);
- expect(result?.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeDefined();
// Verify expiredAt is approximately 8760 hours (1 year) in the future
const expectedExpirationTime = new Date(beforeSave.getTime() + 8760 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -477,22 +445,22 @@ describe('Message Operations', () => {
it('should handle missing config gracefully', async () => {
// Simulate missing config - should use default retention period
- delete mockCtx.interfaceConfig;
+ delete mockReq.config;
- mockCtx.isTemporary = true;
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveMessage(mockCtx, mockMessageData);
+ const result = await saveMessage(mockReq, mockMessageData);
const afterSave = new Date();
// Should still save the message with default retention period (30 days)
- expect(result?.messageId).toBe('msg123');
- expect(result?.expiredAt).toBeDefined();
- expect(result?.expiredAt).toBeInstanceOf(Date);
+ expect(result.messageId).toBe('msg123');
+ expect(result.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeInstanceOf(Date);
// Verify expiredAt is approximately 30 days in the future (720 hours)
const expectedExpirationTime = new Date(beforeSave.getTime() + 720 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -504,18 +472,18 @@ describe('Message Operations', () => {
it('should use default retention when config is not provided', async () => {
// Mock getAppConfig to return empty config
- mockCtx.interfaceConfig = undefined; // Empty config
+ mockReq.config = {}; // Empty config
- mockCtx.isTemporary = true;
+ mockReq.body = { isTemporary: true };
const beforeSave = new Date();
- const result = await saveMessage(mockCtx, mockMessageData);
+ const result = await saveMessage(mockReq, mockMessageData);
- expect(result?.expiredAt).toBeDefined();
+ expect(result.expiredAt).toBeDefined();
// Default retention is 30 days (720 hours)
const expectedExpirationTime = new Date(beforeSave.getTime() + 30 * 24 * 60 * 60 * 1000);
- const actualExpirationTime = new Date(result?.expiredAt ?? 0);
+ const actualExpirationTime = new Date(result.expiredAt);
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
expectedExpirationTime.getTime() - 1000,
@@ -527,47 +495,47 @@ describe('Message Operations', () => {
it('should not update expiredAt on message update', async () => {
// First save a temporary message
- mockCtx.interfaceConfig = { temporaryChatRetention: 24 };
+ mockReq.config.interfaceConfig.temporaryChatRetention = 24;
- mockCtx.isTemporary = true;
- const savedMessage = await saveMessage(mockCtx, mockMessageData);
- const originalExpiredAt = savedMessage?.expiredAt;
+ mockReq.body = { isTemporary: true };
+ const savedMessage = await saveMessage(mockReq, mockMessageData);
+ const originalExpiredAt = savedMessage.expiredAt;
// Now update the message without isTemporary flag
- mockCtx.isTemporary = undefined;
- const updatedMessage = await updateMessage(mockCtx.userId, {
+ mockReq.body = {};
+ const updatedMessage = await updateMessage(mockReq, {
messageId: 'msg123',
text: 'Updated text',
});
// expiredAt should not be in the returned updated message object
- expect(updatedMessage?.expiredAt).toBeUndefined();
+ expect(updatedMessage.expiredAt).toBeUndefined();
// Verify in database that expiredAt wasn't changed
const dbMessage = await Message.findOne({ messageId: 'msg123', user: 'user123' });
- expect(dbMessage?.expiredAt).toEqual(originalExpiredAt);
+ expect(dbMessage.expiredAt).toEqual(originalExpiredAt);
});
it('should preserve expiredAt when saving existing temporary message', async () => {
// First save a temporary message
- mockCtx.interfaceConfig = { temporaryChatRetention: 24 };
+ mockReq.config.interfaceConfig.temporaryChatRetention = 24;
- mockCtx.isTemporary = true;
- const firstSave = await saveMessage(mockCtx, mockMessageData);
- const originalExpiredAt = firstSave?.expiredAt;
+ mockReq.body = { isTemporary: true };
+ const firstSave = await saveMessage(mockReq, mockMessageData);
+ const originalExpiredAt = firstSave.expiredAt;
// Wait a bit to ensure time difference
await new Promise((resolve) => setTimeout(resolve, 100));
// Save again with same messageId but different text
const updatedData = { ...mockMessageData, text: 'Updated text' };
- const secondSave = await saveMessage(mockCtx, updatedData);
+ const secondSave = await saveMessage(mockReq, updatedData);
// Should update text but create new expiredAt
- expect(secondSave?.text).toBe('Updated text');
- expect(secondSave?.expiredAt).toBeDefined();
- expect(new Date(secondSave?.expiredAt ?? 0).getTime()).toBeGreaterThan(
- new Date(originalExpiredAt ?? 0).getTime(),
+ expect(secondSave.text).toBe('Updated text');
+ expect(secondSave.expiredAt).toBeDefined();
+ expect(new Date(secondSave.expiredAt).getTime()).toBeGreaterThan(
+ new Date(originalExpiredAt).getTime(),
);
});
@@ -601,8 +569,8 @@ describe('Message Operations', () => {
const bulk1 = savedMessages.find((m) => m.messageId === 'bulk1');
const bulk2 = savedMessages.find((m) => m.messageId === 'bulk2');
- expect(bulk1?.expiredAt).toBeDefined();
- expect(bulk2?.expiredAt).toBeNull();
+ expect(bulk1.expiredAt).toBeDefined();
+ expect(bulk2.expiredAt).toBeNull();
});
});
@@ -611,11 +579,7 @@ describe('Message Operations', () => {
* Helper to create messages with specific timestamps
* Uses collection.insertOne to bypass Mongoose timestamps
*/
- const createMessageWithTimestamp = async (
- index: number,
- conversationId: string,
- createdAt: Date,
- ) => {
+ const createMessageWithTimestamp = async (index, conversationId, createdAt) => {
const messageId = uuidv4();
await Message.collection.insertOne({
messageId,
@@ -637,22 +601,15 @@ describe('Message Operations', () => {
conversationId,
user,
pageSize = 25,
- cursor = null as string | null,
+ cursor = null,
sortBy = 'createdAt',
sortDirection = 'desc',
- }: {
- conversationId: string;
- user: string;
- pageSize?: number;
- cursor?: string | null;
- sortBy?: string;
- sortDirection?: string;
}) => {
const sortOrder = sortDirection === 'asc' ? 1 : -1;
const sortField = ['createdAt', 'updatedAt'].includes(sortBy) ? sortBy : 'createdAt';
const cursorOperator = sortDirection === 'asc' ? '$gt' : '$lt';
- const filter: Record = { conversationId, user };
+ const filter = { conversationId, user };
if (cursor) {
filter[sortField] = { [cursorOperator]: new Date(cursor) };
}
@@ -662,13 +619,11 @@ describe('Message Operations', () => {
.limit(pageSize + 1)
.lean();
- let nextCursor: string | null = null;
+ let nextCursor = null;
if (messages.length > pageSize) {
messages.pop(); // Remove extra item used to detect next page
// Create cursor from the last RETURNED item (not the popped one)
- nextCursor = (messages[messages.length - 1] as Record)[
- sortField
- ] as string;
+ nextCursor = messages[messages.length - 1][sortField];
}
return { messages, nextCursor };
@@ -722,7 +677,7 @@ describe('Message Operations', () => {
const baseTime = new Date('2026-01-01T12:00:00.000Z');
// Create exactly 26 messages
- const messages: (IMessage | null)[] = [];
+ const messages = [];
for (let i = 0; i < 26; i++) {
const createdAt = new Date(baseTime.getTime() - i * 60000);
const msg = await createMessageWithTimestamp(i, conversationId, createdAt);
@@ -744,7 +699,7 @@ describe('Message Operations', () => {
// Item 26 should NOT be in page 1
const page1Ids = page1.messages.map((m) => m.messageId);
- expect(page1Ids).not.toContain(item26!.messageId);
+ expect(page1Ids).not.toContain(item26.messageId);
// Fetch second page
const page2 = await getMessagesByCursor({
@@ -756,7 +711,7 @@ describe('Message Operations', () => {
// Item 26 MUST be in page 2 (this was the bug - it was being skipped)
expect(page2.messages).toHaveLength(1);
- expect((page2.messages[0] as { messageId: string }).messageId).toBe(item26!.messageId);
+ expect(page2.messages[0].messageId).toBe(item26.messageId);
});
it('should sort by createdAt DESC by default', async () => {
@@ -785,10 +740,10 @@ describe('Message Operations', () => {
});
// Should be sorted by createdAt DESC (newest first) by default
- expect(result?.messages).toHaveLength(3);
- expect((result?.messages[0] as { messageId: string }).messageId).toBe(msg3!.messageId);
- expect((result?.messages[1] as { messageId: string }).messageId).toBe(msg2!.messageId);
- expect((result?.messages[2] as { messageId: string }).messageId).toBe(msg1!.messageId);
+ expect(result.messages).toHaveLength(3);
+ expect(result.messages[0].messageId).toBe(msg3.messageId);
+ expect(result.messages[1].messageId).toBe(msg2.messageId);
+ expect(result.messages[2].messageId).toBe(msg1.messageId);
});
it('should support ascending sort direction', async () => {
@@ -812,9 +767,9 @@ describe('Message Operations', () => {
});
// Should be sorted by createdAt ASC (oldest first)
- expect(result?.messages).toHaveLength(2);
- expect((result?.messages[0] as { messageId: string }).messageId).toBe(msg1!.messageId);
- expect((result?.messages[1] as { messageId: string }).messageId).toBe(msg2!.messageId);
+ expect(result.messages).toHaveLength(2);
+ expect(result.messages[0].messageId).toBe(msg1.messageId);
+ expect(result.messages[1].messageId).toBe(msg2.messageId);
});
it('should handle empty conversation', async () => {
@@ -825,8 +780,8 @@ describe('Message Operations', () => {
user: 'user123',
});
- expect(result?.messages).toHaveLength(0);
- expect(result?.nextCursor).toBeNull();
+ expect(result.messages).toHaveLength(0);
+ expect(result.nextCursor).toBeNull();
});
it('should only return messages for the specified user', async () => {
@@ -859,8 +814,8 @@ describe('Message Operations', () => {
});
// Should only return user123's message
- expect(result?.messages).toHaveLength(1);
- expect((result?.messages[0] as { user: string }).user).toBe('user123');
+ expect(result.messages).toHaveLength(1);
+ expect(result.messages[0].user).toBe('user123');
});
it('should handle exactly pageSize number of messages (no next page)', async () => {
@@ -879,8 +834,8 @@ describe('Message Operations', () => {
pageSize: 25,
});
- expect(result?.messages).toHaveLength(25);
- expect(result?.nextCursor).toBeNull(); // No next page
+ expect(result.messages).toHaveLength(25);
+ expect(result.nextCursor).toBeNull(); // No next page
});
it('should handle pageSize of 1', async () => {
@@ -894,8 +849,8 @@ describe('Message Operations', () => {
}
// Fetch with pageSize 1
- let cursor: string | null = null;
- const allMessages: unknown[] = [];
+ let cursor = null;
+ const allMessages = [];
for (let page = 0; page < 5; page++) {
const result = await getMessagesByCursor({
@@ -905,8 +860,8 @@ describe('Message Operations', () => {
cursor,
});
- allMessages.push(...(result?.messages ?? []));
- cursor = result?.nextCursor;
+ allMessages.push(...result.messages);
+ cursor = result.nextCursor;
if (!cursor) {
break;
@@ -915,7 +870,7 @@ describe('Message Operations', () => {
// Should get all 3 messages without duplicates
expect(allMessages).toHaveLength(3);
- const uniqueIds = new Set(allMessages.map((m) => (m as { messageId: string }).messageId));
+ const uniqueIds = new Set(allMessages.map((m) => m.messageId));
expect(uniqueIds.size).toBe(3);
});
@@ -924,7 +879,7 @@ describe('Message Operations', () => {
const sameTime = new Date('2026-01-01T12:00:00.000Z');
// Create multiple messages with the exact same timestamp
- const messages: (IMessage | null)[] = [];
+ const messages = [];
for (let i = 0; i < 5; i++) {
const msg = await createMessageWithTimestamp(i, conversationId, sameTime);
messages.push(msg);
@@ -937,89 +892,7 @@ describe('Message Operations', () => {
});
// All messages should be returned
- expect(result?.messages).toHaveLength(5);
- });
- });
-
- describe('tenantId stripping', () => {
- it('saveMessage should not write caller-supplied tenantId to the document', async () => {
- const messageId = uuidv4();
- const conversationId = uuidv4();
- const result = await saveMessage(
- { userId: 'user123' },
- { messageId, conversationId, text: 'Tenant test', tenantId: 'malicious-tenant' },
- );
-
- expect(result).not.toBeNull();
- expect(result).toBeDefined();
- const doc = await Message.findOne({ messageId }).lean();
- expect(doc).not.toBeNull();
- expect(doc?.text).toBe('Tenant test');
- expect(doc?.tenantId).toBeUndefined();
- });
-
- it('bulkSaveMessages should not overwrite tenantId via update payload', async () => {
- const messageId = uuidv4();
- const conversationId = uuidv4();
-
- await tenantStorage.run({ tenantId: 'real-tenant' }, async () => {
- await Message.create({
- messageId,
- conversationId,
- user: 'user123',
- text: 'Original',
- });
- });
-
- await tenantStorage.run({ tenantId: 'real-tenant' }, async () => {
- await bulkSaveMessages([
- {
- messageId,
- conversationId,
- user: 'user123',
- text: 'Updated',
- tenantId: 'malicious-tenant',
- },
- ]);
- });
-
- const doc = await runAsSystem(async () => Message.findOne({ messageId }).lean());
- expect(doc).not.toBeNull();
- expect(doc?.text).toBe('Updated');
- expect(doc?.tenantId).toBe('real-tenant');
- });
-
- it('recordMessage should not write caller-supplied tenantId to the document', async () => {
- const messageId = uuidv4();
- const conversationId = uuidv4();
- await recordMessage({
- user: 'user123',
- messageId,
- conversationId,
- text: 'Record tenant test',
- tenantId: 'malicious-tenant',
- });
-
- const doc = await Message.findOne({ messageId }).lean();
- expect(doc).not.toBeNull();
- expect(doc?.text).toBe('Record tenant test');
- expect(doc?.tenantId).toBeUndefined();
- });
-
- it('updateMessage should not write caller-supplied tenantId to the document', async () => {
- const messageId = uuidv4();
- const conversationId = uuidv4();
- await saveMessage({ userId: 'user123' }, { messageId, conversationId, text: 'Original' });
-
- await updateMessage('user123', {
- messageId,
- text: 'Updated',
- tenantId: 'malicious-tenant',
- });
-
- const doc = await Message.findOne({ messageId }).lean();
- expect(doc?.text).toBe('Updated');
- expect(doc?.tenantId).toBeUndefined();
+ expect(result.messages).toHaveLength(5);
});
});
});
diff --git a/api/models/Preset.js b/api/models/Preset.js
new file mode 100644
index 0000000000..4db3d59066
--- /dev/null
+++ b/api/models/Preset.js
@@ -0,0 +1,82 @@
+const { logger } = require('@librechat/data-schemas');
+const { Preset } = require('~/db/models');
+
+const getPreset = async (user, presetId) => {
+ try {
+ return await Preset.findOne({ user, presetId }).lean();
+ } catch (error) {
+ logger.error('[getPreset] Error getting single preset', error);
+ return { message: 'Error getting single preset' };
+ }
+};
+
+module.exports = {
+ getPreset,
+ getPresets: async (user, filter) => {
+ try {
+ const presets = await Preset.find({ ...filter, user }).lean();
+ const defaultValue = 10000;
+
+ presets.sort((a, b) => {
+ let orderA = a.order !== undefined ? a.order : defaultValue;
+ let orderB = b.order !== undefined ? b.order : defaultValue;
+
+ if (orderA !== orderB) {
+ return orderA - orderB;
+ }
+
+ return b.updatedAt - a.updatedAt;
+ });
+
+ return presets;
+ } catch (error) {
+ logger.error('[getPresets] Error getting presets', error);
+ return { message: 'Error retrieving presets' };
+ }
+ },
+ savePreset: async (user, { presetId, newPresetId, defaultPreset, ...preset }) => {
+ try {
+ const setter = { $set: {} };
+ const { user: _, ...cleanPreset } = preset;
+ const update = { presetId, ...cleanPreset };
+ if (preset.tools && Array.isArray(preset.tools)) {
+ update.tools =
+ preset.tools
+ .map((tool) => tool?.pluginKey ?? tool)
+ .filter((toolName) => typeof toolName === 'string') ?? [];
+ }
+ if (newPresetId) {
+ update.presetId = newPresetId;
+ }
+
+ if (defaultPreset) {
+ update.defaultPreset = defaultPreset;
+ update.order = 0;
+
+ const currentDefault = await Preset.findOne({ defaultPreset: true, user });
+
+ if (currentDefault && currentDefault.presetId !== presetId) {
+ await Preset.findByIdAndUpdate(currentDefault._id, {
+ $unset: { defaultPreset: '', order: '' },
+ });
+ }
+ } else if (defaultPreset === false) {
+ update.defaultPreset = undefined;
+ update.order = undefined;
+ setter['$unset'] = { defaultPreset: '', order: '' };
+ }
+
+ setter.$set = update;
+ return await Preset.findOneAndUpdate({ presetId, user }, setter, { new: true, upsert: true });
+ } catch (error) {
+ logger.error('[savePreset] Error saving preset', error);
+ return { message: 'Error saving preset' };
+ }
+ },
+ deletePresets: async (user, filter) => {
+ // let toRemove = await Preset.find({ ...filter, user }).select('presetId');
+ // const ids = toRemove.map((instance) => instance.presetId);
+ let deleteCount = await Preset.deleteMany({ ...filter, user });
+ return deleteCount;
+ },
+};
diff --git a/api/models/Project.js b/api/models/Project.js
new file mode 100644
index 0000000000..8fd1e556f9
--- /dev/null
+++ b/api/models/Project.js
@@ -0,0 +1,133 @@
+const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
+const { Project } = require('~/db/models');
+
+/**
+ * Retrieve a project by ID and convert the found project document to a plain object.
+ *
+ * @param {string} projectId - The ID of the project to find and return as a plain object.
+ * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
+ * @returns {Promise} A plain object representing the project document, or `null` if no project is found.
+ */
+const getProjectById = async function (projectId, fieldsToSelect = null) {
+ const query = Project.findById(projectId);
+
+ if (fieldsToSelect) {
+ query.select(fieldsToSelect);
+ }
+
+ return await query.lean();
+};
+
+/**
+ * Retrieve a project by name and convert the found project document to a plain object.
+ * If the project with the given name doesn't exist and the name is "instance", create it and return the lean version.
+ *
+ * @param {string} projectName - The name of the project to find or create.
+ * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
+ * @returns {Promise} A plain object representing the project document.
+ */
+const getProjectByName = async function (projectName, fieldsToSelect = null) {
+ const query = { name: projectName };
+ const update = { $setOnInsert: { name: projectName } };
+ const options = {
+ new: true,
+ upsert: projectName === GLOBAL_PROJECT_NAME,
+ lean: true,
+ select: fieldsToSelect,
+ };
+
+ return await Project.findOneAndUpdate(query, update, options);
+};
+
+/**
+ * Add an array of prompt group IDs to a project's promptGroupIds array, ensuring uniqueness.
+ *
+ * @param {string} projectId - The ID of the project to update.
+ * @param {string[]} promptGroupIds - The array of prompt group IDs to add to the project.
+ * @returns {Promise} The updated project document.
+ */
+const addGroupIdsToProject = async function (projectId, promptGroupIds) {
+ return await Project.findByIdAndUpdate(
+ projectId,
+ { $addToSet: { promptGroupIds: { $each: promptGroupIds } } },
+ { new: true },
+ );
+};
+
+/**
+ * Remove an array of prompt group IDs from a project's promptGroupIds array.
+ *
+ * @param {string} projectId - The ID of the project to update.
+ * @param {string[]} promptGroupIds - The array of prompt group IDs to remove from the project.
+ * @returns {Promise} The updated project document.
+ */
+const removeGroupIdsFromProject = async function (projectId, promptGroupIds) {
+ return await Project.findByIdAndUpdate(
+ projectId,
+ { $pull: { promptGroupIds: { $in: promptGroupIds } } },
+ { new: true },
+ );
+};
+
+/**
+ * Remove a prompt group ID from all projects.
+ *
+ * @param {string} promptGroupId - The ID of the prompt group to remove from projects.
+ * @returns {Promise}
+ */
+const removeGroupFromAllProjects = async (promptGroupId) => {
+ await Project.updateMany({}, { $pull: { promptGroupIds: promptGroupId } });
+};
+
+/**
+ * Add an array of agent IDs to a project's agentIds array, ensuring uniqueness.
+ *
+ * @param {string} projectId - The ID of the project to update.
+ * @param {string[]} agentIds - The array of agent IDs to add to the project.
+ * @returns {Promise} The updated project document.
+ */
+const addAgentIdsToProject = async function (projectId, agentIds) {
+ return await Project.findByIdAndUpdate(
+ projectId,
+ { $addToSet: { agentIds: { $each: agentIds } } },
+ { new: true },
+ );
+};
+
+/**
+ * Remove an array of agent IDs from a project's agentIds array.
+ *
+ * @param {string} projectId - The ID of the project to update.
+ * @param {string[]} agentIds - The array of agent IDs to remove from the project.
+ * @returns {Promise} The updated project document.
+ */
+const removeAgentIdsFromProject = async function (projectId, agentIds) {
+ return await Project.findByIdAndUpdate(
+ projectId,
+ { $pull: { agentIds: { $in: agentIds } } },
+ { new: true },
+ );
+};
+
+/**
+ * Remove an agent ID from all projects.
+ *
+ * @param {string} agentId - The ID of the agent to remove from projects.
+ * @returns {Promise}
+ */
+const removeAgentFromAllProjects = async (agentId) => {
+ await Project.updateMany({}, { $pull: { agentIds: agentId } });
+};
+
+module.exports = {
+ getProjectById,
+ getProjectByName,
+ /* prompts */
+ addGroupIdsToProject,
+ removeGroupIdsFromProject,
+ removeGroupFromAllProjects,
+ /* agents */
+ addAgentIdsToProject,
+ removeAgentIdsFromProject,
+ removeAgentFromAllProjects,
+};
diff --git a/api/models/Prompt.js b/api/models/Prompt.js
new file mode 100644
index 0000000000..bde911b23a
--- /dev/null
+++ b/api/models/Prompt.js
@@ -0,0 +1,708 @@
+const { ObjectId } = require('mongodb');
+const { escapeRegExp } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
+const {
+ Constants,
+ SystemRoles,
+ ResourceType,
+ SystemCategories,
+} = require('librechat-data-provider');
+const {
+ removeGroupFromAllProjects,
+ removeGroupIdsFromProject,
+ addGroupIdsToProject,
+ getProjectByName,
+} = require('./Project');
+const { removeAllPermissions } = require('~/server/services/PermissionService');
+const { PromptGroup, Prompt, AclEntry } = require('~/db/models');
+
+/**
+ * Create a pipeline for the aggregation to get prompt groups
+ * @param {Object} query
+ * @param {number} skip
+ * @param {number} limit
+ * @returns {[Object]} - The pipeline for the aggregation
+ */
+const createGroupPipeline = (query, skip, limit) => {
+ return [
+ { $match: query },
+ { $sort: { createdAt: -1 } },
+ { $skip: skip },
+ { $limit: limit },
+ {
+ $lookup: {
+ from: 'prompts',
+ localField: 'productionId',
+ foreignField: '_id',
+ as: 'productionPrompt',
+ },
+ },
+ { $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
+ {
+ $project: {
+ name: 1,
+ numberOfGenerations: 1,
+ oneliner: 1,
+ category: 1,
+ projectIds: 1,
+ productionId: 1,
+ author: 1,
+ authorName: 1,
+ createdAt: 1,
+ updatedAt: 1,
+ 'productionPrompt.prompt': 1,
+ // 'productionPrompt._id': 1,
+ // 'productionPrompt.type': 1,
+ },
+ },
+ ];
+};
+
+/**
+ * Create a pipeline for the aggregation to get all prompt groups
+ * @param {Object} query
+ * @param {Partial} $project
+ * @returns {[Object]} - The pipeline for the aggregation
+ */
+const createAllGroupsPipeline = (
+ query,
+ $project = {
+ name: 1,
+ oneliner: 1,
+ category: 1,
+ author: 1,
+ authorName: 1,
+ createdAt: 1,
+ updatedAt: 1,
+ command: 1,
+ 'productionPrompt.prompt': 1,
+ },
+) => {
+ return [
+ { $match: query },
+ { $sort: { createdAt: -1 } },
+ {
+ $lookup: {
+ from: 'prompts',
+ localField: 'productionId',
+ foreignField: '_id',
+ as: 'productionPrompt',
+ },
+ },
+ { $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
+ {
+ $project,
+ },
+ ];
+};
+
+/**
+ * Get all prompt groups with filters
+ * @param {ServerRequest} req
+ * @param {TPromptGroupsWithFilterRequest} filter
+ * @returns {Promise}
+ */
+const getAllPromptGroups = async (req, filter) => {
+ try {
+ const { name, ...query } = filter;
+
+ let searchShared = true;
+ let searchSharedOnly = false;
+ if (name) {
+ query.name = new RegExp(escapeRegExp(name), 'i');
+ }
+ if (!query.category) {
+ delete query.category;
+ } else if (query.category === SystemCategories.MY_PROMPTS) {
+ searchShared = false;
+ delete query.category;
+ } else if (query.category === SystemCategories.NO_CATEGORY) {
+ query.category = '';
+ } else if (query.category === SystemCategories.SHARED_PROMPTS) {
+ searchSharedOnly = true;
+ delete query.category;
+ }
+
+ let combinedQuery = query;
+
+ if (searchShared) {
+ const project = await getProjectByName(Constants.GLOBAL_PROJECT_NAME, 'promptGroupIds');
+ if (project && project.promptGroupIds && project.promptGroupIds.length > 0) {
+ const projectQuery = { _id: { $in: project.promptGroupIds }, ...query };
+ delete projectQuery.author;
+ combinedQuery = searchSharedOnly ? projectQuery : { $or: [projectQuery, query] };
+ }
+ }
+
+ const promptGroupsPipeline = createAllGroupsPipeline(combinedQuery);
+ return await PromptGroup.aggregate(promptGroupsPipeline).exec();
+ } catch (error) {
+    logger.error('Error getting all prompt groups', error);
+ return { message: 'Error getting all prompt groups' };
+ }
+};
+
+/**
+ * Get prompt groups with filters
+ * @param {ServerRequest} req
+ * @param {TPromptGroupsWithFilterRequest} filter
+ * @returns {Promise}
+ */
+const getPromptGroups = async (req, filter) => {
+ try {
+ const { pageNumber = 1, pageSize = 10, name, ...query } = filter;
+
+ const validatedPageNumber = Math.max(parseInt(pageNumber, 10), 1);
+ const validatedPageSize = Math.max(parseInt(pageSize, 10), 1);
+
+ let searchShared = true;
+ let searchSharedOnly = false;
+ if (name) {
+ query.name = new RegExp(escapeRegExp(name), 'i');
+ }
+ if (!query.category) {
+ delete query.category;
+ } else if (query.category === SystemCategories.MY_PROMPTS) {
+ searchShared = false;
+ delete query.category;
+ } else if (query.category === SystemCategories.NO_CATEGORY) {
+ query.category = '';
+ } else if (query.category === SystemCategories.SHARED_PROMPTS) {
+ searchSharedOnly = true;
+ delete query.category;
+ }
+
+ let combinedQuery = query;
+
+ if (searchShared) {
+ // const projects = req.user.projects || []; // TODO: handle multiple projects
+ const project = await getProjectByName(Constants.GLOBAL_PROJECT_NAME, 'promptGroupIds');
+ if (project && project.promptGroupIds && project.promptGroupIds.length > 0) {
+ const projectQuery = { _id: { $in: project.promptGroupIds }, ...query };
+ delete projectQuery.author;
+ combinedQuery = searchSharedOnly ? projectQuery : { $or: [projectQuery, query] };
+ }
+ }
+
+ const skip = (validatedPageNumber - 1) * validatedPageSize;
+ const limit = validatedPageSize;
+
+ const promptGroupsPipeline = createGroupPipeline(combinedQuery, skip, limit);
+ const totalPromptGroupsPipeline = [{ $match: combinedQuery }, { $count: 'total' }];
+
+ const [promptGroupsResults, totalPromptGroupsResults] = await Promise.all([
+ PromptGroup.aggregate(promptGroupsPipeline).exec(),
+ PromptGroup.aggregate(totalPromptGroupsPipeline).exec(),
+ ]);
+
+ const promptGroups = promptGroupsResults;
+ const totalPromptGroups =
+ totalPromptGroupsResults.length > 0 ? totalPromptGroupsResults[0].total : 0;
+
+ return {
+ promptGroups,
+ pageNumber: validatedPageNumber.toString(),
+ pageSize: validatedPageSize.toString(),
+ pages: Math.ceil(totalPromptGroups / validatedPageSize).toString(),
+ };
+ } catch (error) {
+    logger.error('Error getting prompt groups', error);
+ return { message: 'Error getting prompt groups' };
+ }
+};
+
+/**
+ * @param {Object} fields
+ * @param {string} fields._id
+ * @param {string} fields.author
+ * @param {string} fields.role
+ * @returns {Promise}
+ */
+const deletePromptGroup = async ({ _id, author, role }) => {
+ // Build query - with ACL, author is optional
+ const query = { _id };
+ const groupQuery = { groupId: new ObjectId(_id) };
+
+ // Legacy: Add author filter if provided (backward compatibility)
+ if (author && role !== SystemRoles.ADMIN) {
+ query.author = author;
+ groupQuery.author = author;
+ }
+
+ const response = await PromptGroup.deleteOne(query);
+
+ if (!response || response.deletedCount === 0) {
+ throw new Error('Prompt group not found');
+ }
+
+ await Prompt.deleteMany(groupQuery);
+ await removeGroupFromAllProjects(_id);
+
+ try {
+ await removeAllPermissions({ resourceType: ResourceType.PROMPTGROUP, resourceId: _id });
+ } catch (error) {
+ logger.error('Error removing promptGroup permissions:', error);
+ }
+
+ return { message: 'Prompt group deleted successfully' };
+};
+
+/**
+ * Get prompt groups by accessible IDs with optional cursor-based pagination.
+ * @param {Object} params - The parameters for getting accessible prompt groups.
+ * @param {Array} [params.accessibleIds] - Array of prompt group ObjectIds the user has ACL access to.
+ * @param {Object} [params.otherParams] - Additional query parameters (including author filter).
+ * @param {number} [params.limit] - Number of prompt groups to return (max 100). If not provided, returns all prompt groups.
+ * @param {string} [params.after] - Cursor for pagination - get prompt groups after this cursor. // base64 encoded JSON string with updatedAt and _id.
+ * @returns {Promise} A promise that resolves to an object containing the prompt groups data and pagination info.
+ */
+async function getListPromptGroupsByAccess({
+ accessibleIds = [],
+ otherParams = {},
+ limit = null,
+ after = null,
+}) {
+ const isPaginated = limit !== null && limit !== undefined;
+  const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit, 10) || 20), 100) : null;
+
+ // Build base query combining ACL accessible prompt groups with other filters
+ const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };
+
+ // Add cursor condition
+ if (after && typeof after === 'string' && after !== 'undefined' && after !== 'null') {
+ try {
+ const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
+ const { updatedAt, _id } = cursor;
+
+ const cursorCondition = {
+ $or: [
+ { updatedAt: { $lt: new Date(updatedAt) } },
+ { updatedAt: new Date(updatedAt), _id: { $gt: new ObjectId(_id) } },
+ ],
+ };
+
+ // Merge cursor condition with base query
+ if (Object.keys(baseQuery).length > 0) {
+ baseQuery.$and = [{ ...baseQuery }, cursorCondition];
+ // Remove the original conditions from baseQuery to avoid duplication
+ Object.keys(baseQuery).forEach((key) => {
+ if (key !== '$and') delete baseQuery[key];
+ });
+ } else {
+ Object.assign(baseQuery, cursorCondition);
+ }
+ } catch (error) {
+ logger.warn('Invalid cursor:', error.message);
+ }
+ }
+
+ // Build aggregation pipeline
+ const pipeline = [{ $match: baseQuery }, { $sort: { updatedAt: -1, _id: 1 } }];
+
+ // Only apply limit if pagination is requested
+ if (isPaginated) {
+ pipeline.push({ $limit: normalizedLimit + 1 });
+ }
+
+ // Add lookup for production prompt
+ pipeline.push(
+ {
+ $lookup: {
+ from: 'prompts',
+ localField: 'productionId',
+ foreignField: '_id',
+ as: 'productionPrompt',
+ },
+ },
+ { $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
+ {
+ $project: {
+ name: 1,
+ numberOfGenerations: 1,
+ oneliner: 1,
+ category: 1,
+ projectIds: 1,
+ productionId: 1,
+ author: 1,
+ authorName: 1,
+ createdAt: 1,
+ updatedAt: 1,
+ 'productionPrompt.prompt': 1,
+ },
+ },
+ );
+
+ const promptGroups = await PromptGroup.aggregate(pipeline).exec();
+
+ const hasMore = isPaginated ? promptGroups.length > normalizedLimit : false;
+ const data = (isPaginated ? promptGroups.slice(0, normalizedLimit) : promptGroups).map(
+ (group) => {
+ if (group.author) {
+ group.author = group.author.toString();
+ }
+ return group;
+ },
+ );
+
+ // Generate next cursor only if paginated
+ let nextCursor = null;
+ if (isPaginated && hasMore && data.length > 0) {
+ const lastGroup = promptGroups[normalizedLimit - 1];
+ nextCursor = Buffer.from(
+ JSON.stringify({
+ updatedAt: lastGroup.updatedAt.toISOString(),
+ _id: lastGroup._id.toString(),
+ }),
+ ).toString('base64');
+ }
+
+ return {
+ object: 'list',
+ data,
+ first_id: data.length > 0 ? data[0]._id.toString() : null,
+ last_id: data.length > 0 ? data[data.length - 1]._id.toString() : null,
+ has_more: hasMore,
+ after: nextCursor,
+ };
+}
+
+module.exports = {
+ getPromptGroups,
+ deletePromptGroup,
+ getAllPromptGroups,
+ getListPromptGroupsByAccess,
+ /**
+ * Create a prompt and its respective group
+ * @param {TCreatePromptRecord} saveData
+ * @returns {Promise}
+ */
+ createPromptGroup: async (saveData) => {
+ try {
+ const { prompt, group, author, authorName } = saveData;
+
+ let newPromptGroup = await PromptGroup.findOneAndUpdate(
+ { ...group, author, authorName, productionId: null },
+ { $setOnInsert: { ...group, author, authorName, productionId: null } },
+ { new: true, upsert: true },
+ )
+ .lean()
+ .select('-__v')
+ .exec();
+
+ const newPrompt = await Prompt.findOneAndUpdate(
+ { ...prompt, author, groupId: newPromptGroup._id },
+ { $setOnInsert: { ...prompt, author, groupId: newPromptGroup._id } },
+ { new: true, upsert: true },
+ )
+ .lean()
+ .select('-__v')
+ .exec();
+
+ newPromptGroup = await PromptGroup.findByIdAndUpdate(
+ newPromptGroup._id,
+ { productionId: newPrompt._id },
+ { new: true },
+ )
+ .lean()
+ .select('-__v')
+ .exec();
+
+ return {
+ prompt: newPrompt,
+ group: {
+ ...newPromptGroup,
+ productionPrompt: { prompt: newPrompt.prompt },
+ },
+ };
+ } catch (error) {
+ logger.error('Error saving prompt group', error);
+ throw new Error('Error saving prompt group');
+ }
+ },
+ /**
+ * Save a prompt
+ * @param {TCreatePromptRecord} saveData
+ * @returns {Promise}
+ */
+ savePrompt: async (saveData) => {
+ try {
+ const { prompt, author } = saveData;
+ const newPromptData = {
+ ...prompt,
+ author,
+ };
+
+ /** @type {TPrompt} */
+ let newPrompt;
+ try {
+ newPrompt = await Prompt.create(newPromptData);
+ } catch (error) {
+ if (error?.message?.includes('groupId_1_version_1')) {
+ await Prompt.db.collection('prompts').dropIndex('groupId_1_version_1');
+ } else {
+ throw error;
+ }
+ newPrompt = await Prompt.create(newPromptData);
+ }
+
+ return { prompt: newPrompt };
+ } catch (error) {
+ logger.error('Error saving prompt', error);
+ return { message: 'Error saving prompt' };
+ }
+ },
+ getPrompts: async (filter) => {
+ try {
+ return await Prompt.find(filter).sort({ createdAt: -1 }).lean();
+ } catch (error) {
+ logger.error('Error getting prompts', error);
+ return { message: 'Error getting prompts' };
+ }
+ },
+ getPrompt: async (filter) => {
+ try {
+ if (filter.groupId) {
+ filter.groupId = new ObjectId(filter.groupId);
+ }
+ return await Prompt.findOne(filter).lean();
+ } catch (error) {
+ logger.error('Error getting prompt', error);
+ return { message: 'Error getting prompt' };
+ }
+ },
+ /**
+ * Get prompt groups with filters
+ * @param {TGetRandomPromptsRequest} filter
+ * @returns {Promise}
+ */
+ getRandomPromptGroups: async (filter) => {
+ try {
+ const result = await PromptGroup.aggregate([
+ {
+ $match: {
+ category: { $ne: '' },
+ },
+ },
+ {
+ $group: {
+ _id: '$category',
+ promptGroup: { $first: '$$ROOT' },
+ },
+ },
+ {
+ $replaceRoot: { newRoot: '$promptGroup' },
+ },
+ {
+ $sample: { size: +filter.limit + +filter.skip },
+ },
+ {
+ $skip: +filter.skip,
+ },
+ {
+ $limit: +filter.limit,
+ },
+ ]);
+ return { prompts: result };
+ } catch (error) {
+ logger.error('Error getting prompt groups', error);
+ return { message: 'Error getting prompt groups' };
+ }
+ },
+ getPromptGroupsWithPrompts: async (filter) => {
+ try {
+ return await PromptGroup.findOne(filter)
+ .populate({
+ path: 'prompts',
+ select: '-_id -__v -user',
+ })
+ .select('-_id -__v -user')
+ .lean();
+ } catch (error) {
+ logger.error('Error getting prompt groups', error);
+ return { message: 'Error getting prompt groups' };
+ }
+ },
+ getPromptGroup: async (filter) => {
+ try {
+ return await PromptGroup.findOne(filter).lean();
+ } catch (error) {
+ logger.error('Error getting prompt group', error);
+ return { message: 'Error getting prompt group' };
+ }
+ },
+ /**
+ * Deletes a prompt and its corresponding prompt group if it is the last prompt in the group.
+ *
+ * @param {Object} options - The options for deleting the prompt.
+ * @param {ObjectId|string} options.promptId - The ID of the prompt to delete.
+ * @param {ObjectId|string} options.groupId - The ID of the prompt's group.
+ * @param {ObjectId|string} options.author - The ID of the prompt's author.
+ * @param {string} options.role - The role of the prompt's author.
+ * @return {Promise} An object containing the result of the deletion.
+ * If the prompt was deleted successfully, the object will have a property 'prompt' with the value 'Prompt deleted successfully'.
+ * If the prompt group was deleted successfully, the object will have a property 'promptGroup' with the message 'Prompt group deleted successfully' and id of the deleted group.
+ * If there was an error deleting the prompt, the object will have a property 'message' with the value 'Error deleting prompt'.
+ */
+ deletePrompt: async ({ promptId, groupId, author, role }) => {
+ const query = { _id: promptId, groupId, author };
+ if (role === SystemRoles.ADMIN) {
+ delete query.author;
+ }
+ const { deletedCount } = await Prompt.deleteOne(query);
+ if (deletedCount === 0) {
+ throw new Error('Failed to delete the prompt');
+ }
+
+ const remainingPrompts = await Prompt.find({ groupId })
+ .select('_id')
+ .sort({ createdAt: 1 })
+ .lean();
+
+ if (remainingPrompts.length === 0) {
+ // Remove all ACL entries for the promptGroup when deleting the last prompt
+ try {
+ await removeAllPermissions({
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: groupId,
+ });
+ } catch (error) {
+ logger.error('Error removing promptGroup permissions:', error);
+ }
+
+ await PromptGroup.deleteOne({ _id: groupId });
+ await removeGroupFromAllProjects(groupId);
+
+ return {
+ prompt: 'Prompt deleted successfully',
+ promptGroup: {
+ message: 'Prompt group deleted successfully',
+ id: groupId,
+ },
+ };
+ } else {
+ const promptGroup = await PromptGroup.findById(groupId).lean();
+      if (promptGroup?.productionId?.toString() === promptId.toString()) {
+ await PromptGroup.updateOne(
+ { _id: groupId },
+ { productionId: remainingPrompts[remainingPrompts.length - 1]._id },
+ );
+ }
+
+ return { prompt: 'Prompt deleted successfully' };
+ }
+ },
+ /**
+ * Delete all prompts and prompt groups created by a specific user.
+ * @param {ServerRequest} req - The server request object.
+ * @param {string} userId - The ID of the user whose prompts and prompt groups are to be deleted.
+ */
+ deleteUserPrompts: async (req, userId) => {
+ try {
+ const promptGroups = await getAllPromptGroups(req, { author: new ObjectId(userId) });
+
+ if (promptGroups.length === 0) {
+ return;
+ }
+
+ const groupIds = promptGroups.map((group) => group._id);
+
+ for (const groupId of groupIds) {
+ await removeGroupFromAllProjects(groupId);
+ }
+
+ await AclEntry.deleteMany({
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: { $in: groupIds },
+ });
+
+ await PromptGroup.deleteMany({ author: new ObjectId(userId) });
+ await Prompt.deleteMany({ author: new ObjectId(userId) });
+ } catch (error) {
+ logger.error('[deleteUserPrompts] General error:', error);
+ }
+ },
+ /**
+ * Update prompt group
+ * @param {Partial} filter - Filter to find prompt group
+ * @param {Partial} data - Data to update
+ * @returns {Promise}
+ */
+ updatePromptGroup: async (filter, data) => {
+ try {
+ const updateOps = {};
+ if (data.removeProjectIds) {
+ for (const projectId of data.removeProjectIds) {
+ await removeGroupIdsFromProject(projectId, [filter._id]);
+ }
+
+ updateOps.$pull = { projectIds: { $in: data.removeProjectIds } };
+ delete data.removeProjectIds;
+ }
+
+ if (data.projectIds) {
+ for (const projectId of data.projectIds) {
+ await addGroupIdsToProject(projectId, [filter._id]);
+ }
+
+ updateOps.$addToSet = { projectIds: { $each: data.projectIds } };
+ delete data.projectIds;
+ }
+
+ const updateData = { ...data, ...updateOps };
+ const updatedDoc = await PromptGroup.findOneAndUpdate(filter, updateData, {
+ new: true,
+ upsert: false,
+ });
+
+ if (!updatedDoc) {
+ throw new Error('Prompt group not found');
+ }
+
+ return updatedDoc;
+ } catch (error) {
+ logger.error('Error updating prompt group', error);
+ return { message: 'Error updating prompt group' };
+ }
+ },
+ /**
+ * Function to make a prompt production based on its ID.
+ * @param {String} promptId - The ID of the prompt to make production.
+ * @returns {Object} The result of the production operation.
+ */
+ makePromptProduction: async (promptId) => {
+ try {
+ const prompt = await Prompt.findById(promptId).lean();
+
+ if (!prompt) {
+ throw new Error('Prompt not found');
+ }
+
+ await PromptGroup.findByIdAndUpdate(
+ prompt.groupId,
+ { productionId: prompt._id },
+ { new: true },
+ )
+ .lean()
+ .exec();
+
+ return {
+ message: 'Prompt production made successfully',
+ };
+ } catch (error) {
+ logger.error('Error making prompt production', error);
+ return { message: 'Error making prompt production' };
+ }
+ },
+ updatePromptLabels: async (_id, labels) => {
+ try {
+ const response = await Prompt.updateOne({ _id }, { $set: { labels } });
+ if (response.matchedCount === 0) {
+ return { message: 'Prompt not found' };
+ }
+ return { message: 'Prompt labels updated successfully' };
+ } catch (error) {
+ logger.error('Error updating prompt labels', error);
+ return { message: 'Error updating prompt labels' };
+ }
+ },
+};
diff --git a/api/models/Prompt.spec.js b/api/models/Prompt.spec.js
new file mode 100644
index 0000000000..e00a1a518c
--- /dev/null
+++ b/api/models/Prompt.spec.js
@@ -0,0 +1,564 @@
+const mongoose = require('mongoose');
+const { ObjectId } = require('mongodb');
+const { logger } = require('@librechat/data-schemas');
+const { MongoMemoryServer } = require('mongodb-memory-server');
+const {
+ SystemRoles,
+ ResourceType,
+ AccessRoleIds,
+ PrincipalType,
+ PermissionBits,
+} = require('librechat-data-provider');
+
+// Mock the config/connect module to prevent connection attempts during tests
+jest.mock('../../config/connect', () => jest.fn().mockResolvedValue(true));
+
+const dbModels = require('~/db/models');
+
+// Disable console for tests
+logger.silent = true;
+
+let mongoServer;
+let Prompt, PromptGroup, AclEntry, AccessRole, User, Group, Project;
+let promptFns, permissionService;
+let testUsers, testGroups, testRoles;
+
+beforeAll(async () => {
+ // Set up MongoDB memory server
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ await mongoose.connect(mongoUri);
+
+ // Initialize models
+ Prompt = dbModels.Prompt;
+ PromptGroup = dbModels.PromptGroup;
+ AclEntry = dbModels.AclEntry;
+ AccessRole = dbModels.AccessRole;
+ User = dbModels.User;
+ Group = dbModels.Group;
+ Project = dbModels.Project;
+
+ promptFns = require('~/models/Prompt');
+ permissionService = require('~/server/services/PermissionService');
+
+ // Create test data
+ await setupTestData();
+});
+
+afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+ jest.clearAllMocks();
+});
+
+async function setupTestData() {
+ // Create access roles for promptGroups
+ testRoles = {
+ viewer: await AccessRole.create({
+ accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
+ name: 'Viewer',
+ description: 'Can view promptGroups',
+ resourceType: ResourceType.PROMPTGROUP,
+ permBits: PermissionBits.VIEW,
+ }),
+ editor: await AccessRole.create({
+ accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
+ name: 'Editor',
+ description: 'Can view and edit promptGroups',
+ resourceType: ResourceType.PROMPTGROUP,
+ permBits: PermissionBits.VIEW | PermissionBits.EDIT,
+ }),
+ owner: await AccessRole.create({
+ accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
+ name: 'Owner',
+ description: 'Full control over promptGroups',
+ resourceType: ResourceType.PROMPTGROUP,
+ permBits:
+ PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE,
+ }),
+ };
+
+ // Create test users
+ testUsers = {
+ owner: await User.create({
+ name: 'Prompt Owner',
+ email: 'owner@example.com',
+ role: SystemRoles.USER,
+ }),
+ editor: await User.create({
+ name: 'Prompt Editor',
+ email: 'editor@example.com',
+ role: SystemRoles.USER,
+ }),
+ viewer: await User.create({
+ name: 'Prompt Viewer',
+ email: 'viewer@example.com',
+ role: SystemRoles.USER,
+ }),
+ admin: await User.create({
+ name: 'Admin User',
+ email: 'admin@example.com',
+ role: SystemRoles.ADMIN,
+ }),
+ noAccess: await User.create({
+ name: 'No Access User',
+ email: 'noaccess@example.com',
+ role: SystemRoles.USER,
+ }),
+ };
+
+ // Create test groups
+ testGroups = {
+ editors: await Group.create({
+ name: 'Prompt Editors',
+ description: 'Group with editor access',
+ }),
+ viewers: await Group.create({
+ name: 'Prompt Viewers',
+ description: 'Group with viewer access',
+ }),
+ };
+
+ await Project.create({
+ name: 'Global',
+ description: 'Global project',
+ promptGroupIds: [],
+ });
+}
+
+describe('Prompt ACL Permissions', () => {
+ describe('Creating Prompts with Permissions', () => {
+ it('should grant owner permissions when creating a prompt', async () => {
+ // First create a group
+ const testGroup = await PromptGroup.create({
+ name: 'Test Group',
+ category: 'testing',
+ author: testUsers.owner._id,
+ authorName: testUsers.owner.name,
+ productionId: new mongoose.Types.ObjectId(),
+ });
+
+ const promptData = {
+ prompt: {
+ prompt: 'Test prompt content',
+ name: 'Test Prompt',
+ type: 'text',
+ groupId: testGroup._id,
+ },
+ author: testUsers.owner._id,
+ };
+
+ await promptFns.savePrompt(promptData);
+
+ // Manually grant permissions as would happen in the route
+ await permissionService.grantPermission({
+ principalType: PrincipalType.USER,
+ principalId: testUsers.owner._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testGroup._id,
+ accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
+ grantedBy: testUsers.owner._id,
+ });
+
+ // Check ACL entry
+ const aclEntry = await AclEntry.findOne({
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testGroup._id,
+ principalType: PrincipalType.USER,
+ principalId: testUsers.owner._id,
+ });
+
+ expect(aclEntry).toBeTruthy();
+ expect(aclEntry.permBits).toBe(testRoles.owner.permBits);
+ });
+ });
+
+ describe('Accessing Prompts', () => {
+ let testPromptGroup;
+
+ beforeEach(async () => {
+ // Create a prompt group
+ testPromptGroup = await PromptGroup.create({
+ name: 'Test Group',
+ author: testUsers.owner._id,
+ authorName: testUsers.owner.name,
+ productionId: new ObjectId(),
+ });
+
+ // Create a prompt
+ await Prompt.create({
+ prompt: 'Test prompt for access control',
+ name: 'Access Test Prompt',
+ author: testUsers.owner._id,
+ groupId: testPromptGroup._id,
+ type: 'text',
+ });
+
+ // Grant owner permissions
+ await permissionService.grantPermission({
+ principalType: PrincipalType.USER,
+ principalId: testUsers.owner._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
+ grantedBy: testUsers.owner._id,
+ });
+ });
+
+ afterEach(async () => {
+ await Prompt.deleteMany({});
+ await PromptGroup.deleteMany({});
+ await AclEntry.deleteMany({});
+ });
+
+ it('owner should have full access to their prompt', async () => {
+ const hasAccess = await permissionService.checkPermission({
+ userId: testUsers.owner._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ requiredPermission: PermissionBits.VIEW,
+ });
+
+ expect(hasAccess).toBe(true);
+
+ const canEdit = await permissionService.checkPermission({
+ userId: testUsers.owner._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ requiredPermission: PermissionBits.EDIT,
+ });
+
+ expect(canEdit).toBe(true);
+ });
+
+ it('user with viewer role should only have view access', async () => {
+ // Grant viewer permissions
+ await permissionService.grantPermission({
+ principalType: PrincipalType.USER,
+ principalId: testUsers.viewer._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
+ grantedBy: testUsers.owner._id,
+ });
+
+ const canView = await permissionService.checkPermission({
+ userId: testUsers.viewer._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ requiredPermission: PermissionBits.VIEW,
+ });
+
+ const canEdit = await permissionService.checkPermission({
+ userId: testUsers.viewer._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ requiredPermission: PermissionBits.EDIT,
+ });
+
+ expect(canView).toBe(true);
+ expect(canEdit).toBe(false);
+ });
+
+ it('user without permissions should have no access', async () => {
+ const hasAccess = await permissionService.checkPermission({
+ userId: testUsers.noAccess._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ requiredPermission: PermissionBits.VIEW,
+ });
+
+ expect(hasAccess).toBe(false);
+ });
+
+ it('admin should have access regardless of permissions', async () => {
+ // Admin users should work through normal permission system
+ // The middleware layer handles admin bypass, not the permission service
+ const hasAccess = await permissionService.checkPermission({
+ userId: testUsers.admin._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ requiredPermission: PermissionBits.VIEW,
+ });
+
+ // Without explicit permissions, even admin won't have access at this layer
+ expect(hasAccess).toBe(false);
+
+ // The actual admin bypass happens in the middleware layer (`canAccessPromptViaGroup`/`canAccessPromptGroupResource`)
+ // which checks req.user.role === SystemRoles.ADMIN
+ });
+ });
+
+ describe('Group-based Access', () => {
+ let testPromptGroup;
+
+ beforeEach(async () => {
+ // Create a prompt group first
+ testPromptGroup = await PromptGroup.create({
+ name: 'Group Access Test Group',
+ author: testUsers.owner._id,
+ authorName: testUsers.owner.name,
+ productionId: new ObjectId(),
+ });
+
+ await Prompt.create({
+ prompt: 'Group access test prompt',
+ name: 'Group Test',
+ author: testUsers.owner._id,
+ groupId: testPromptGroup._id,
+ type: 'text',
+ });
+
+ // Add users to groups
+ await User.findByIdAndUpdate(testUsers.editor._id, {
+ $push: { groups: testGroups.editors._id },
+ });
+
+ await User.findByIdAndUpdate(testUsers.viewer._id, {
+ $push: { groups: testGroups.viewers._id },
+ });
+ });
+
+ afterEach(async () => {
+ await Prompt.deleteMany({});
+ await AclEntry.deleteMany({});
+ await User.updateMany({}, { $set: { groups: [] } });
+ });
+
+ it('group members should inherit group permissions', async () => {
+ // Create a prompt group
+ const testPromptGroup = await PromptGroup.create({
+ name: 'Group Test Group',
+ author: testUsers.owner._id,
+ authorName: testUsers.owner.name,
+ productionId: new ObjectId(),
+ });
+
+ const { addUserToGroup } = require('~/models');
+ await addUserToGroup(testUsers.editor._id, testGroups.editors._id);
+
+ const prompt = await promptFns.savePrompt({
+ author: testUsers.owner._id,
+ prompt: {
+ prompt: 'Group test prompt',
+ name: 'Group Test',
+ groupId: testPromptGroup._id,
+ type: 'text',
+ },
+ });
+
+ // Check if savePrompt returned an error
+ if (!prompt || !prompt.prompt) {
+ throw new Error(`Failed to save prompt: ${prompt?.message || 'Unknown error'}`);
+ }
+
+ // Grant edit permissions to the group
+ await permissionService.grantPermission({
+ principalType: PrincipalType.GROUP,
+ principalId: testGroups.editors._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
+ grantedBy: testUsers.owner._id,
+ });
+
+ // Check if group member has access
+ const hasAccess = await permissionService.checkPermission({
+ userId: testUsers.editor._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ requiredPermission: PermissionBits.EDIT,
+ });
+
+ expect(hasAccess).toBe(true);
+
+ // Check that non-member doesn't have access
+ const nonMemberAccess = await permissionService.checkPermission({
+ userId: testUsers.viewer._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ requiredPermission: PermissionBits.EDIT,
+ });
+
+ expect(nonMemberAccess).toBe(false);
+ });
+ });
+
+ describe('Public Access', () => {
+ let publicPromptGroup, privatePromptGroup;
+
+ beforeEach(async () => {
+ // Create separate prompt groups for public and private access
+ publicPromptGroup = await PromptGroup.create({
+ name: 'Public Access Test Group',
+ author: testUsers.owner._id,
+ authorName: testUsers.owner.name,
+ productionId: new ObjectId(),
+ });
+
+ privatePromptGroup = await PromptGroup.create({
+ name: 'Private Access Test Group',
+ author: testUsers.owner._id,
+ authorName: testUsers.owner.name,
+ productionId: new ObjectId(),
+ });
+
+ // Create prompts in their respective groups
+ await Prompt.create({
+ prompt: 'Public prompt',
+ name: 'Public',
+ author: testUsers.owner._id,
+ groupId: publicPromptGroup._id,
+ type: 'text',
+ });
+
+ await Prompt.create({
+ prompt: 'Private prompt',
+ name: 'Private',
+ author: testUsers.owner._id,
+ groupId: privatePromptGroup._id,
+ type: 'text',
+ });
+
+ // Grant public view access to publicPromptGroup
+ await permissionService.grantPermission({
+ principalType: PrincipalType.PUBLIC,
+ principalId: null,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: publicPromptGroup._id,
+ accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
+ grantedBy: testUsers.owner._id,
+ });
+
+ // Grant only owner access to privatePromptGroup
+ await permissionService.grantPermission({
+ principalType: PrincipalType.USER,
+ principalId: testUsers.owner._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: privatePromptGroup._id,
+ accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
+ grantedBy: testUsers.owner._id,
+ });
+ });
+
+ afterEach(async () => {
+ await Prompt.deleteMany({});
+ await PromptGroup.deleteMany({});
+ await AclEntry.deleteMany({});
+ });
+
+ it('public prompt should be accessible to any user', async () => {
+ const hasAccess = await permissionService.checkPermission({
+ userId: testUsers.noAccess._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: publicPromptGroup._id,
+ requiredPermission: PermissionBits.VIEW,
+ includePublic: true,
+ });
+
+ expect(hasAccess).toBe(true);
+ });
+
+ it('private prompt should not be accessible to unauthorized users', async () => {
+ const hasAccess = await permissionService.checkPermission({
+ userId: testUsers.noAccess._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: privatePromptGroup._id,
+ requiredPermission: PermissionBits.VIEW,
+ includePublic: true,
+ });
+
+ expect(hasAccess).toBe(false);
+ });
+ });
+
+ describe('Prompt Deletion', () => {
+ let testPromptGroup;
+
+ it('should remove ACL entries when prompt is deleted', async () => {
+ testPromptGroup = await PromptGroup.create({
+ name: 'Deletion Test Group',
+ author: testUsers.owner._id,
+ authorName: testUsers.owner.name,
+ productionId: new ObjectId(),
+ });
+
+ const prompt = await promptFns.savePrompt({
+ author: testUsers.owner._id,
+ prompt: {
+ prompt: 'To be deleted',
+ name: 'Delete Test',
+ groupId: testPromptGroup._id,
+ type: 'text',
+ },
+ });
+
+ // Check if savePrompt returned an error
+ if (!prompt || !prompt.prompt) {
+ throw new Error(`Failed to save prompt: ${prompt?.message || 'Unknown error'}`);
+ }
+
+ const testPromptId = prompt.prompt._id;
+ const promptGroupId = testPromptGroup._id;
+
+ // Grant permission
+ await permissionService.grantPermission({
+ principalType: PrincipalType.USER,
+ principalId: testUsers.owner._id,
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
+ grantedBy: testUsers.owner._id,
+ });
+
+ // Verify ACL entry exists
+ const beforeDelete = await AclEntry.find({
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ });
+ expect(beforeDelete).toHaveLength(1);
+
+ // Delete the prompt
+ await promptFns.deletePrompt({
+ promptId: testPromptId,
+ groupId: promptGroupId,
+ author: testUsers.owner._id,
+ role: SystemRoles.USER,
+ });
+
+ // Verify ACL entries are removed
+ const aclEntries = await AclEntry.find({
+ resourceType: ResourceType.PROMPTGROUP,
+ resourceId: testPromptGroup._id,
+ });
+
+ expect(aclEntries).toHaveLength(0);
+ });
+ });
+
+ describe('Backwards Compatibility', () => {
+ it('should handle prompts without ACL entries gracefully', async () => {
+ // Create a prompt group first
+ const promptGroup = await PromptGroup.create({
+ name: 'Legacy Test Group',
+ author: testUsers.owner._id,
+ authorName: testUsers.owner.name,
+ productionId: new ObjectId(),
+ });
+
+ // Create a prompt without ACL entries (legacy prompt)
+ const legacyPrompt = await Prompt.create({
+ prompt: 'Legacy prompt without ACL',
+ name: 'Legacy',
+ author: testUsers.owner._id,
+ groupId: promptGroup._id,
+ type: 'text',
+ });
+
+ // The system should handle this gracefully
+ const prompt = await promptFns.getPrompt({ _id: legacyPrompt._id });
+ expect(prompt).toBeTruthy();
+ expect(prompt._id.toString()).toBe(legacyPrompt._id.toString());
+ });
+ });
+});
diff --git a/config/__tests__/migrate-prompt-permissions.spec.js b/api/models/PromptGroupMigration.spec.js
similarity index 90%
rename from config/__tests__/migrate-prompt-permissions.spec.js
rename to api/models/PromptGroupMigration.spec.js
index 2d5b2cb4b0..f568012cb3 100644
--- a/config/__tests__/migrate-prompt-permissions.spec.js
+++ b/api/models/PromptGroupMigration.spec.js
@@ -3,6 +3,7 @@ const { ObjectId } = require('mongodb');
const { logger } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
+ Constants,
ResourceType,
AccessRoleIds,
PrincipalType,
@@ -11,16 +12,16 @@ const {
} = require('librechat-data-provider');
// Mock the config/connect module to prevent connection attempts during tests
-jest.mock('../connect', () => jest.fn().mockResolvedValue(true));
+jest.mock('../../config/connect', () => jest.fn().mockResolvedValue(true));
// Disable console for tests
logger.silent = true;
describe('PromptGroup Migration Script', () => {
let mongoServer;
- let Prompt, PromptGroup, AclEntry, AccessRole, User;
+ let Prompt, PromptGroup, AclEntry, AccessRole, User, Project;
let migrateToPromptGroupPermissions;
- let testOwner;
+ let testOwner, testProject;
let ownerRole, viewerRole;
beforeAll(async () => {
@@ -36,6 +37,7 @@ describe('PromptGroup Migration Script', () => {
AclEntry = dbModels.AclEntry;
AccessRole = dbModels.AccessRole;
User = dbModels.User;
+ Project = dbModels.Project;
// Create test user
testOwner = await User.create({
@@ -44,10 +46,11 @@ describe('PromptGroup Migration Script', () => {
role: 'USER',
});
- // Create test project document in the raw `projects` collection
- const projectName = 'instance';
- await mongoose.connection.db.collection('projects').insertOne({
+ // Create test project with the proper name
+ const projectName = Constants.GLOBAL_PROJECT_NAME || 'instance';
+ testProject = await Project.create({
name: projectName,
+ description: 'Global project',
promptGroupIds: [],
});
@@ -78,7 +81,7 @@ describe('PromptGroup Migration Script', () => {
});
// Import migration function
- const migration = require('../migrate-prompt-permissions');
+ const migration = require('../../config/migrate-prompt-permissions');
migrateToPromptGroupPermissions = migration.migrateToPromptGroupPermissions;
});
@@ -92,9 +95,9 @@ describe('PromptGroup Migration Script', () => {
await Prompt.deleteMany({});
await PromptGroup.deleteMany({});
await AclEntry.deleteMany({});
- await mongoose.connection.db
- .collection('projects')
- .updateOne({ name: 'instance' }, { $set: { promptGroupIds: [] } });
+ // Reset the project's promptGroupIds array
+ testProject.promptGroupIds = [];
+ await testProject.save();
});
it('should categorize promptGroups correctly in dry run', async () => {
@@ -115,9 +118,8 @@ describe('PromptGroup Migration Script', () => {
});
// Add global group to project's promptGroupIds array
- await mongoose.connection.db
- .collection('projects')
- .updateOne({ name: 'instance' }, { $set: { promptGroupIds: [globalPromptGroup._id] } });
+ testProject.promptGroupIds = [globalPromptGroup._id];
+ await testProject.save();
const result = await migrateToPromptGroupPermissions({ dryRun: true });
@@ -144,9 +146,8 @@ describe('PromptGroup Migration Script', () => {
});
// Add global group to project's promptGroupIds array
- await mongoose.connection.db
- .collection('projects')
- .updateOne({ name: 'instance' }, { $set: { promptGroupIds: [globalPromptGroup._id] } });
+ testProject.promptGroupIds = [globalPromptGroup._id];
+ await testProject.save();
const result = await migrateToPromptGroupPermissions({ dryRun: false });
diff --git a/api/models/Role.js b/api/models/Role.js
new file mode 100644
index 0000000000..b7f806f3b6
--- /dev/null
+++ b/api/models/Role.js
@@ -0,0 +1,304 @@
+const {
+ CacheKeys,
+ SystemRoles,
+ roleDefaults,
+ permissionsSchema,
+ removeNullishValues,
+} = require('librechat-data-provider');
+const { logger } = require('@librechat/data-schemas');
+const getLogStores = require('~/cache/getLogStores');
+const { Role } = require('~/db/models');
+
+/**
+ * Retrieve a role by name and convert the found role document to a plain object.
+ * If the role with the given name doesn't exist and the name is a system defined role,
+ * create it and return the lean version.
+ *
+ * @param {string} roleName - The name of the role to find or create.
+ * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
+ * @returns {Promise<IRole|null>} Role document.
+ */
+const getRoleByName = async function (roleName, fieldsToSelect = null) {
+ const cache = getLogStores(CacheKeys.ROLES);
+ try {
+ const cachedRole = await cache.get(roleName);
+ if (cachedRole) {
+ return cachedRole;
+ }
+ let query = Role.findOne({ name: roleName });
+ if (fieldsToSelect) {
+ query = query.select(fieldsToSelect);
+ }
+ let role = await query.lean().exec();
+
+ if (!role && SystemRoles[roleName]) {
+ role = await new Role(roleDefaults[roleName]).save();
+ await cache.set(roleName, role);
+ return role.toObject();
+ }
+ await cache.set(roleName, role);
+ return role;
+ } catch (error) {
+ throw new Error(`Failed to retrieve or create role: ${error.message}`);
+ }
+};
+
+/**
+ * Update role values by name.
+ *
+ * @param {string} roleName - The name of the role to update.
+ * @param {Partial<IRole>} updates - The fields to update.
+ * @returns {Promise<IRole>} Updated role document.
+ */
+const updateRoleByName = async function (roleName, updates) {
+ const cache = getLogStores(CacheKeys.ROLES);
+ try {
+ const role = await Role.findOneAndUpdate(
+ { name: roleName },
+ { $set: updates },
+ { new: true, lean: true },
+ )
+ .select('-__v')
+ .lean()
+ .exec();
+ await cache.set(roleName, role);
+ return role;
+ } catch (error) {
+ throw new Error(`Failed to update role: ${error.message}`);
+ }
+};
+
+/**
+ * Updates access permissions for a specific role and multiple permission types.
+ * @param {string} roleName - The role to update.
+ * @param {Object.<PermissionTypes, Object.<Permissions, boolean>>} permissionsUpdate - Permissions to update and their values.
+ * @param {IRole} [roleData] - Optional role data to use instead of fetching from the database.
+ */
+async function updateAccessPermissions(roleName, permissionsUpdate, roleData) {
+ // Filter and clean the permission updates based on our schema definition.
+ const updates = {};
+ for (const [permissionType, permissions] of Object.entries(permissionsUpdate)) {
+ if (permissionsSchema.shape && permissionsSchema.shape[permissionType]) {
+ updates[permissionType] = removeNullishValues(permissions);
+ }
+ }
+ if (!Object.keys(updates).length) {
+ return;
+ }
+
+ try {
+ const role = roleData ?? (await getRoleByName(roleName));
+ if (!role) {
+ return;
+ }
+
+ const currentPermissions = role.permissions || {};
+ const updatedPermissions = { ...currentPermissions };
+ let hasChanges = false;
+
+ const unsetFields = {};
+ const permissionTypes = Object.keys(permissionsSchema.shape || {});
+ for (const permType of permissionTypes) {
+ if (role[permType] && typeof role[permType] === 'object') {
+ logger.info(
+ `Migrating '${roleName}' role from old schema: found '${permType}' at top level`,
+ );
+
+ updatedPermissions[permType] = {
+ ...updatedPermissions[permType],
+ ...role[permType],
+ };
+
+ unsetFields[permType] = 1;
+ hasChanges = true;
+ }
+ }
+
+ // Migrate legacy SHARED_GLOBAL → SHARE for PROMPTS and AGENTS.
+ // SHARED_GLOBAL was removed in favour of SHARE in PR #11283. If the DB still has
+ // SHARED_GLOBAL but not SHARE, inherit the value so sharing intent is preserved.
+ const legacySharedGlobalTypes = ['PROMPTS', 'AGENTS'];
+ for (const legacyPermType of legacySharedGlobalTypes) {
+ const existingTypePerms = currentPermissions[legacyPermType];
+ if (
+ existingTypePerms &&
+ 'SHARED_GLOBAL' in existingTypePerms &&
+ !('SHARE' in existingTypePerms) &&
+ updates[legacyPermType] &&
+ // Don't override an explicit SHARE value the caller already provided
+ !('SHARE' in updates[legacyPermType])
+ ) {
+ const inheritedValue = existingTypePerms['SHARED_GLOBAL'];
+ updates[legacyPermType]['SHARE'] = inheritedValue;
+ logger.info(
+ `Migrating '${roleName}' role ${legacyPermType}.SHARED_GLOBAL=${inheritedValue} → SHARE`,
+ );
+ }
+ }
+
+ for (const [permissionType, permissions] of Object.entries(updates)) {
+ const currentTypePermissions = currentPermissions[permissionType] || {};
+ updatedPermissions[permissionType] = { ...currentTypePermissions };
+
+ for (const [permission, value] of Object.entries(permissions)) {
+ if (currentTypePermissions[permission] !== value) {
+ updatedPermissions[permissionType][permission] = value;
+ hasChanges = true;
+ logger.info(
+ `Updating '${roleName}' role permission '${permissionType}' '${permission}' from ${currentTypePermissions[permission]} to: ${value}`,
+ );
+ }
+ }
+ }
+
+ // Clean up orphaned SHARED_GLOBAL fields left in DB after the schema rename.
+ // Since we $set the full permissions object, deleting from updatedPermissions
+ // is sufficient to remove the field from MongoDB.
+ for (const legacyPermType of legacySharedGlobalTypes) {
+ const existingTypePerms = currentPermissions[legacyPermType];
+ if (existingTypePerms && 'SHARED_GLOBAL' in existingTypePerms) {
+ if (!updates[legacyPermType]) {
+ // permType wasn't in the update payload so the migration block above didn't run.
+ // Create a writable copy and handle the SHARED_GLOBAL → SHARE inheritance here
+ // to avoid removing SHARED_GLOBAL without writing SHARE (data loss).
+ updatedPermissions[legacyPermType] = { ...existingTypePerms };
+ if (!('SHARE' in existingTypePerms)) {
+ updatedPermissions[legacyPermType]['SHARE'] = existingTypePerms['SHARED_GLOBAL'];
+ logger.info(
+ `Migrating '${roleName}' role ${legacyPermType}.SHARED_GLOBAL=${existingTypePerms['SHARED_GLOBAL']} → SHARE`,
+ );
+ }
+ }
+ delete updatedPermissions[legacyPermType]['SHARED_GLOBAL'];
+ hasChanges = true;
+ logger.info(
+ `Removed legacy SHARED_GLOBAL field from '${roleName}' role ${legacyPermType} permissions`,
+ );
+ }
+ }
+
+ if (hasChanges) {
+ const updateObj = { permissions: updatedPermissions };
+
+ if (Object.keys(unsetFields).length > 0) {
+ logger.info(
+ `Unsetting old schema fields for '${roleName}' role: ${Object.keys(unsetFields).join(', ')}`,
+ );
+
+ try {
+ await Role.updateOne(
+ { name: roleName },
+ {
+ $set: updateObj,
+ $unset: unsetFields,
+ },
+ );
+
+ const cache = getLogStores(CacheKeys.ROLES);
+ const updatedRole = await Role.findOne({ name: roleName }).select('-__v').lean().exec();
+ await cache.set(roleName, updatedRole);
+
+ logger.info(`Updated role '${roleName}' and removed old schema fields`);
+ } catch (updateError) {
+ logger.error(`Error during role migration update: ${updateError.message}`);
+ throw updateError;
+ }
+ } else {
+ // Standard update if no migration needed
+ await updateRoleByName(roleName, updateObj);
+ }
+
+ logger.info(`Updated '${roleName}' role permissions`);
+ } else {
+ logger.info(`No changes needed for '${roleName}' role permissions`);
+ }
+ } catch (error) {
+ logger.error(`Failed to update ${roleName} role permissions:`, error);
+ }
+}
+
+/**
+ * Migrates roles from old schema to new schema structure.
+ * This can be called directly to fix existing roles.
+ *
+ * @param {string} [roleName] - Optional specific role to migrate. If not provided, migrates all roles.
+ * @returns {Promise<number>} Number of roles migrated.
+ */
+const migrateRoleSchema = async function (roleName) {
+ try {
+ // Get roles to migrate
+ let roles;
+ if (roleName) {
+ const role = await Role.findOne({ name: roleName });
+ roles = role ? [role] : [];
+ } else {
+ roles = await Role.find({});
+ }
+
+ logger.info(`Migrating ${roles.length} roles to new schema structure`);
+ let migratedCount = 0;
+
+ for (const role of roles) {
+ const permissionTypes = Object.keys(permissionsSchema.shape || {});
+ const unsetFields = {};
+ let hasOldSchema = false;
+
+ // Check for old schema fields
+ for (const permType of permissionTypes) {
+ if (role[permType] && typeof role[permType] === 'object') {
+ hasOldSchema = true;
+
+ // Ensure permissions object exists
+ role.permissions = role.permissions || {};
+
+ // Migrate permissions from old location to new
+ role.permissions[permType] = {
+ ...role.permissions[permType],
+ ...role[permType],
+ };
+
+ // Mark field for removal
+ unsetFields[permType] = 1;
+ }
+ }
+
+ if (hasOldSchema) {
+ try {
+ logger.info(`Migrating role '${role.name}' from old schema structure`);
+
+ // Simple update operation
+ await Role.updateOne(
+ { _id: role._id },
+ {
+ $set: { permissions: role.permissions },
+ $unset: unsetFields,
+ },
+ );
+
+ // Refresh cache
+ const cache = getLogStores(CacheKeys.ROLES);
+ const updatedRole = await Role.findById(role._id).lean().exec();
+ await cache.set(role.name, updatedRole);
+
+ migratedCount++;
+ logger.info(`Migrated role '${role.name}'`);
+ } catch (error) {
+ logger.error(`Failed to migrate role '${role.name}': ${error.message}`);
+ }
+ }
+ }
+
+ logger.info(`Migration complete: ${migratedCount} roles migrated`);
+ return migratedCount;
+ } catch (error) {
+ logger.error(`Role schema migration failed: ${error.message}`);
+ throw error;
+ }
+};
+
+module.exports = {
+ getRoleByName,
+ updateRoleByName,
+ migrateRoleSchema,
+ updateAccessPermissions,
+};
diff --git a/api/models/Role.spec.js b/api/models/Role.spec.js
new file mode 100644
index 0000000000..0ec2f831e2
--- /dev/null
+++ b/api/models/Role.spec.js
@@ -0,0 +1,511 @@
+const mongoose = require('mongoose');
+const { MongoMemoryServer } = require('mongodb-memory-server');
+const {
+ SystemRoles,
+ Permissions,
+ roleDefaults,
+ PermissionTypes,
+} = require('librechat-data-provider');
+const { getRoleByName, updateAccessPermissions } = require('~/models/Role');
+const getLogStores = require('~/cache/getLogStores');
+const { initializeRoles } = require('~/models');
+const { Role } = require('~/db/models');
+
+// Mock the cache
+jest.mock('~/cache/getLogStores', () =>
+ jest.fn().mockReturnValue({
+ get: jest.fn(),
+ set: jest.fn(),
+ del: jest.fn(),
+ }),
+);
+
+let mongoServer;
+
+beforeAll(async () => {
+ mongoServer = await MongoMemoryServer.create();
+ const mongoUri = mongoServer.getUri();
+ await mongoose.connect(mongoUri);
+});
+
+afterAll(async () => {
+ await mongoose.disconnect();
+ await mongoServer.stop();
+});
+
+beforeEach(async () => {
+ await Role.deleteMany({});
+ getLogStores.mockClear();
+});
+
+describe('updateAccessPermissions', () => {
+ it('should update permissions when changes are needed', async () => {
+ await new Role({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.PROMPTS]: {
+ CREATE: true,
+ USE: true,
+ SHARE: false,
+ },
+ },
+ }).save();
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.PROMPTS]: {
+ CREATE: true,
+ USE: true,
+ SHARE: true,
+ },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
+ CREATE: true,
+ USE: true,
+ SHARE: true,
+ });
+ });
+
+ it('should not update permissions when no changes are needed', async () => {
+ await new Role({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.PROMPTS]: {
+ CREATE: true,
+ USE: true,
+ SHARE: false,
+ },
+ },
+ }).save();
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.PROMPTS]: {
+ CREATE: true,
+ USE: true,
+ SHARE: false,
+ },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
+ CREATE: true,
+ USE: true,
+ SHARE: false,
+ });
+ });
+
+ it('should handle non-existent roles', async () => {
+ await updateAccessPermissions('NON_EXISTENT_ROLE', {
+ [PermissionTypes.PROMPTS]: { CREATE: true },
+ });
+ const role = await Role.findOne({ name: 'NON_EXISTENT_ROLE' });
+ expect(role).toBeNull();
+ });
+
+ it('should update only specified permissions', async () => {
+ await new Role({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.PROMPTS]: {
+ CREATE: true,
+ USE: true,
+ SHARE: false,
+ },
+ },
+ }).save();
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.PROMPTS]: { SHARE: true },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
+ CREATE: true,
+ USE: true,
+ SHARE: true,
+ });
+ });
+
+ it('should handle partial updates', async () => {
+ await new Role({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.PROMPTS]: {
+ CREATE: true,
+ USE: true,
+ SHARE: false,
+ },
+ },
+ }).save();
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.PROMPTS]: { USE: false },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
+ CREATE: true,
+ USE: false,
+ SHARE: false,
+ });
+ });
+
+ it('should update multiple permission types at once', async () => {
+ await new Role({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.PROMPTS]: { CREATE: true, USE: true, SHARE: false },
+ [PermissionTypes.BOOKMARKS]: { USE: true },
+ },
+ }).save();
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.PROMPTS]: { USE: false, SHARE: true },
+ [PermissionTypes.BOOKMARKS]: { USE: false },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
+ CREATE: true,
+ USE: false,
+ SHARE: true,
+ });
+ expect(updatedRole.permissions[PermissionTypes.BOOKMARKS]).toEqual({ USE: false });
+ });
+
+ it('should handle updates for a single permission type', async () => {
+ await new Role({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.PROMPTS]: { CREATE: true, USE: true, SHARE: false },
+ },
+ }).save();
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.PROMPTS]: { USE: false, SHARE: true },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
+ CREATE: true,
+ USE: false,
+ SHARE: true,
+ });
+ });
+
+ it('should update MULTI_CONVO permissions', async () => {
+ await new Role({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.MULTI_CONVO]: { USE: false },
+ },
+ }).save();
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.MULTI_CONVO]: { USE: true },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+ expect(updatedRole.permissions[PermissionTypes.MULTI_CONVO]).toEqual({ USE: true });
+ });
+
+ it('should update MULTI_CONVO permissions along with other permission types', async () => {
+ await new Role({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.PROMPTS]: { CREATE: true, USE: true, SHARE: false },
+ [PermissionTypes.MULTI_CONVO]: { USE: false },
+ },
+ }).save();
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.PROMPTS]: { SHARE: true },
+ [PermissionTypes.MULTI_CONVO]: { USE: true },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS]).toEqual({
+ CREATE: true,
+ USE: true,
+ SHARE: true,
+ });
+ expect(updatedRole.permissions[PermissionTypes.MULTI_CONVO]).toEqual({ USE: true });
+ });
+
+ it('should inherit SHARED_GLOBAL value into SHARE when SHARE is absent from both DB and update', async () => {
+ // Simulates the startup backfill path: caller sends SHARE_PUBLIC but not SHARE;
+ // migration should inherit SHARED_GLOBAL to preserve the deployment's sharing intent.
+ await Role.collection.insertOne({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.PROMPTS]: { USE: true, CREATE: true, SHARED_GLOBAL: true },
+ [PermissionTypes.AGENTS]: { USE: true, CREATE: true, SHARED_GLOBAL: false },
+ },
+ });
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ // No explicit SHARE — migration should inherit from SHARED_GLOBAL
+ [PermissionTypes.PROMPTS]: { SHARE_PUBLIC: false },
+ [PermissionTypes.AGENTS]: { SHARE_PUBLIC: false },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+
+ // SHARED_GLOBAL=true → SHARE=true (inherited)
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS].SHARE).toBe(true);
+ // SHARED_GLOBAL=false → SHARE=false (inherited)
+ expect(updatedRole.permissions[PermissionTypes.AGENTS].SHARE).toBe(false);
+ // SHARED_GLOBAL cleaned up
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS].SHARED_GLOBAL).toBeUndefined();
+ expect(updatedRole.permissions[PermissionTypes.AGENTS].SHARED_GLOBAL).toBeUndefined();
+ });
+
+ it('should respect explicit SHARE in update payload and not override it with SHARED_GLOBAL', async () => {
+ // Caller explicitly passes SHARE: false even though SHARED_GLOBAL=true in DB.
+ // The explicit intent must win; migration must not silently overwrite it.
+ await Role.collection.insertOne({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.PROMPTS]: { USE: true, SHARED_GLOBAL: true },
+ },
+ });
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.PROMPTS]: { SHARE: false }, // explicit false — should be preserved
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS].SHARE).toBe(false);
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS].SHARED_GLOBAL).toBeUndefined();
+ });
+
+ it('should migrate SHARED_GLOBAL to SHARE even when the permType is not in the update payload', async () => {
+ // Bug #2 regression: cleanup block removes SHARED_GLOBAL but migration block only
+ // runs when the permType is in the update payload. Without the fix, SHARE would be
+ // lost when any other permType (e.g. MULTI_CONVO) is the only thing being updated.
+ await Role.collection.insertOne({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.PROMPTS]: {
+ USE: true,
+ SHARED_GLOBAL: true, // legacy — NO SHARE present
+ },
+ [PermissionTypes.MULTI_CONVO]: { USE: false },
+ },
+ });
+
+ // Only update MULTI_CONVO — PROMPTS is intentionally absent from the payload
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.MULTI_CONVO]: { USE: true },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+
+ // SHARE should have been inherited from SHARED_GLOBAL, not silently dropped
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS].SHARE).toBe(true);
+ // SHARED_GLOBAL should be removed
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS].SHARED_GLOBAL).toBeUndefined();
+ // Original USE should be untouched
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS].USE).toBe(true);
+ // The actual update should have applied
+ expect(updatedRole.permissions[PermissionTypes.MULTI_CONVO].USE).toBe(true);
+ });
+
+ it('should remove orphaned SHARED_GLOBAL when SHARE already exists and permType is not in update', async () => {
+ // Safe cleanup case: SHARE already set, SHARED_GLOBAL is just orphaned noise.
+ // SHARE must not be changed; SHARED_GLOBAL must be removed.
+ await Role.collection.insertOne({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.PROMPTS]: {
+ USE: true,
+ SHARE: true, // already migrated
+ SHARED_GLOBAL: true, // orphaned
+ },
+ [PermissionTypes.MULTI_CONVO]: { USE: false },
+ },
+ });
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.MULTI_CONVO]: { USE: true },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS].SHARED_GLOBAL).toBeUndefined();
+ expect(updatedRole.permissions[PermissionTypes.PROMPTS].SHARE).toBe(true);
+ expect(updatedRole.permissions[PermissionTypes.MULTI_CONVO].USE).toBe(true);
+ });
+
+ it('should not update MULTI_CONVO permissions when no changes are needed', async () => {
+ await new Role({
+ name: SystemRoles.USER,
+ permissions: {
+ [PermissionTypes.MULTI_CONVO]: { USE: true },
+ },
+ }).save();
+
+ await updateAccessPermissions(SystemRoles.USER, {
+ [PermissionTypes.MULTI_CONVO]: { USE: true },
+ });
+
+ const updatedRole = await getRoleByName(SystemRoles.USER);
+ expect(updatedRole.permissions[PermissionTypes.MULTI_CONVO]).toEqual({ USE: true });
+ });
+});
+
+describe('initializeRoles', () => {
+  beforeEach(async () => {
+    await Role.deleteMany({});
+  });
+
+  it('should create default roles if they do not exist', async () => {
+    await initializeRoles();
+
+    const adminRole = await getRoleByName(SystemRoles.ADMIN);
+    const userRole = await getRoleByName(SystemRoles.USER);
+
+    expect(adminRole).toBeTruthy();
+    expect(userRole).toBeTruthy();
+
+    // Every permission type defined in PermissionTypes must exist on both default roles
+    Object.values(PermissionTypes).forEach((permType) => {
+      expect(adminRole.permissions[permType]).toBeDefined();
+      expect(userRole.permissions[permType]).toBeDefined();
+    });
+
+    // Spot-check a few ADMIN default values sourced from roleDefaults
+    expect(adminRole.permissions[PermissionTypes.PROMPTS].SHARE).toBe(true);
+    expect(adminRole.permissions[PermissionTypes.BOOKMARKS].USE).toBe(true);
+    expect(adminRole.permissions[PermissionTypes.AGENTS].CREATE).toBe(true);
+  });
+
+  it('should not modify existing permissions for existing roles', async () => {
+    const customUserRole = {
+      name: SystemRoles.USER,
+      permissions: {
+        [PermissionTypes.PROMPTS]: {
+          [Permissions.USE]: false,
+          [Permissions.CREATE]: true,
+          [Permissions.SHARE]: true,
+        },
+        [PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
+      },
+    };
+
+    await new Role(customUserRole).save();
+    await initializeRoles();
+
+    const userRole = await getRoleByName(SystemRoles.USER);
+    expect(userRole.permissions[PermissionTypes.PROMPTS]).toEqual(
+      customUserRole.permissions[PermissionTypes.PROMPTS],
+    );
+    expect(userRole.permissions[PermissionTypes.BOOKMARKS]).toEqual(
+      customUserRole.permissions[PermissionTypes.BOOKMARKS],
+    );
+    expect(userRole.permissions[PermissionTypes.AGENTS]).toBeDefined();
+  });
+
+  it('should add new permission types to existing roles', async () => {
+    const partialUserRole = {
+      name: SystemRoles.USER,
+      permissions: {
+        [PermissionTypes.PROMPTS]:
+          roleDefaults[SystemRoles.USER].permissions[PermissionTypes.PROMPTS],
+        [PermissionTypes.BOOKMARKS]:
+          roleDefaults[SystemRoles.USER].permissions[PermissionTypes.BOOKMARKS],
+      },
+    };
+
+    await new Role(partialUserRole).save();
+    await initializeRoles();
+
+    const userRole = await getRoleByName(SystemRoles.USER);
+    expect(userRole.permissions[PermissionTypes.AGENTS]).toBeDefined();
+    expect(userRole.permissions[PermissionTypes.AGENTS].CREATE).toBeDefined();
+    expect(userRole.permissions[PermissionTypes.AGENTS].USE).toBeDefined();
+    expect(userRole.permissions[PermissionTypes.AGENTS].SHARE).toBeDefined();
+  });
+
+  it('should handle multiple runs without duplicating or modifying data', async () => {
+    await initializeRoles();
+    await initializeRoles();
+
+    const adminRoles = await Role.find({ name: SystemRoles.ADMIN });
+    const userRoles = await Role.find({ name: SystemRoles.USER });
+
+    expect(adminRoles).toHaveLength(1);
+    expect(userRoles).toHaveLength(1);
+
+    const adminPerms = adminRoles[0].toObject().permissions;
+    const userPerms = userRoles[0].toObject().permissions;
+    Object.values(PermissionTypes).forEach((permType) => {
+      expect(adminPerms[permType]).toBeDefined();
+      expect(userPerms[permType]).toBeDefined();
+    });
+  });
+
+  it('should update roles with missing permission types from roleDefaults', async () => {
+    const partialAdminRole = {
+      name: SystemRoles.ADMIN,
+      permissions: {
+        [PermissionTypes.PROMPTS]: {
+          [Permissions.USE]: false,
+          [Permissions.CREATE]: false,
+          [Permissions.SHARE]: false,
+        },
+        [PermissionTypes.BOOKMARKS]:
+          roleDefaults[SystemRoles.ADMIN].permissions[PermissionTypes.BOOKMARKS],
+      },
+    };
+
+    await new Role(partialAdminRole).save();
+    await initializeRoles();
+
+    const adminRole = await getRoleByName(SystemRoles.ADMIN);
+    expect(adminRole.permissions[PermissionTypes.PROMPTS]).toEqual(
+      partialAdminRole.permissions[PermissionTypes.PROMPTS],
+    );
+    expect(adminRole.permissions[PermissionTypes.AGENTS]).toBeDefined();
+    expect(adminRole.permissions[PermissionTypes.AGENTS].CREATE).toBeDefined();
+    expect(adminRole.permissions[PermissionTypes.AGENTS].USE).toBeDefined();
+    expect(adminRole.permissions[PermissionTypes.AGENTS].SHARE).toBeDefined();
+  });
+
+  it('should include MULTI_CONVO permissions when creating default roles', async () => {
+    await initializeRoles();
+
+    const adminRole = await getRoleByName(SystemRoles.ADMIN);
+    const userRole = await getRoleByName(SystemRoles.USER);
+
+    expect(adminRole.permissions[PermissionTypes.MULTI_CONVO]).toBeDefined();
+    expect(userRole.permissions[PermissionTypes.MULTI_CONVO]).toBeDefined();
+    expect(adminRole.permissions[PermissionTypes.MULTI_CONVO].USE).toBe(
+      roleDefaults[SystemRoles.ADMIN].permissions[PermissionTypes.MULTI_CONVO].USE,
+    );
+    expect(userRole.permissions[PermissionTypes.MULTI_CONVO].USE).toBe(
+      roleDefaults[SystemRoles.USER].permissions[PermissionTypes.MULTI_CONVO].USE,
+    );
+  });
+
+  it('should add MULTI_CONVO permissions to existing roles without them', async () => {
+    const partialUserRole = {
+      name: SystemRoles.USER,
+      permissions: {
+        [PermissionTypes.PROMPTS]:
+          roleDefaults[SystemRoles.USER].permissions[PermissionTypes.PROMPTS],
+        [PermissionTypes.BOOKMARKS]:
+          roleDefaults[SystemRoles.USER].permissions[PermissionTypes.BOOKMARKS],
+      },
+    };
+
+    await new Role(partialUserRole).save();
+    await initializeRoles();
+
+    const userRole = await getRoleByName(SystemRoles.USER);
+    expect(userRole.permissions[PermissionTypes.MULTI_CONVO]).toBeDefined();
+    expect(userRole.permissions[PermissionTypes.MULTI_CONVO].USE).toBeDefined();
+  });
+});
diff --git a/api/models/ToolCall.js b/api/models/ToolCall.js
new file mode 100644
index 0000000000..689386114b
--- /dev/null
+++ b/api/models/ToolCall.js
@@ -0,0 +1,96 @@
+const { ToolCall } = require('~/db/models');
+
+/**
+ * Create a new tool call
+ * @param {IToolCallData} toolCallData - The tool call data
+ * @returns {Promise<IToolCallData>} The created tool call document
+ */
+async function createToolCall(toolCallData) {
+  try {
+    return await ToolCall.create(toolCallData);
+  } catch (error) {
+    throw new Error(`Error creating tool call: ${error.message}`);
+  }
+}
+
+/**
+ * Get a tool call by ID
+ * @param {string} id - The tool call document ID
+ * @returns {Promise<IToolCallData|null>} The tool call document (lean) or null if not found
+ */
+async function getToolCallById(id) {
+  try {
+    return await ToolCall.findById(id).lean();
+  } catch (error) {
+    throw new Error(`Error fetching tool call: ${error.message}`);
+  }
+}
+
+/**
+ * Get tool calls by message ID and user
+ * @param {string} messageId - The message ID
+ * @param {string} userId - The user's ObjectId
+ * @returns {Promise<IToolCallData[]>} Array of tool call documents (lean)
+ */
+async function getToolCallsByMessage(messageId, userId) {
+  try {
+    return await ToolCall.find({ messageId, user: userId }).lean();
+  } catch (error) {
+    throw new Error(`Error fetching tool calls: ${error.message}`);
+  }
+}
+
+/**
+ * Get tool calls by conversation ID and user
+ * @param {string} conversationId - The conversation ID
+ * @param {string} userId - The user's ObjectId
+ * @returns {Promise<IToolCallData[]>} Array of tool call documents (lean)
+ */
+async function getToolCallsByConvo(conversationId, userId) {
+  try {
+    return await ToolCall.find({ conversationId, user: userId }).lean();
+  } catch (error) {
+    throw new Error(`Error fetching tool calls: ${error.message}`);
+  }
+}
+
+/**
+ * Update a tool call
+ * @param {string} id - The tool call document ID
+ * @param {Partial<IToolCallData>} updateData - The data to update
+ * @returns {Promise<IToolCallData|null>} The updated tool call document (lean) or null if not found
+ */
+async function updateToolCall(id, updateData) {
+  try {
+    return await ToolCall.findByIdAndUpdate(id, updateData, { new: true }).lean();
+  } catch (error) {
+    throw new Error(`Error updating tool call: ${error.message}`);
+  }
+}
+
+/**
+ * Delete tool calls for a user, optionally scoped to one conversation
+ * @param {string} userId - The related user's ObjectId
+ * @param {string} [conversationId] - The tool call conversation ID; when omitted, deletes all of the user's tool calls
+ * @returns {Promise<{ ok?: number; n?: number; deletedCount?: number }>} The result of the delete operation
+ */
+async function deleteToolCalls(userId, conversationId) {
+  try {
+    const query = { user: userId };
+    if (conversationId) {
+      query.conversationId = conversationId;
+    }
+    return await ToolCall.deleteMany(query);
+  } catch (error) {
+    throw new Error(`Error deleting tool call: ${error.message}`);
+  }
+}
+
+module.exports = {
+  createToolCall,
+  updateToolCall,
+  deleteToolCalls,
+  getToolCallById,
+  getToolCallsByConvo,
+  getToolCallsByMessage,
+};
diff --git a/api/models/Transaction.js b/api/models/Transaction.js
new file mode 100644
index 0000000000..e553e2bb3b
--- /dev/null
+++ b/api/models/Transaction.js
@@ -0,0 +1,356 @@
+const { logger } = require('@librechat/data-schemas');
+const { getMultiplier, getCacheMultiplier } = require('./tx');
+const { Transaction, Balance } = require('~/db/models');
+
+const cancelRate = 1.15;
+
+/**
+ * Updates a user's token balance based on a transaction using optimistic concurrency control
+ * without schema changes. Compatible with DocumentDB.
+ * @async
+ * @function
+ * @param {Object} params - The function parameters.
+ * @param {string|mongoose.Types.ObjectId} params.user - The user ID.
+ * @param {number} params.incrementValue - The value to increment the balance by (can be negative).
+ * @param {import('mongoose').UpdateQuery['$set']} [params.setValues] - Optional additional fields to set.
+ * @returns {Promise<Object>} The updated balance document (lean); tokenCredits is clamped at >= 0.
+ * @throws {Error} Throws an error if the update fails after multiple retries.
+ */
+const updateBalance = async ({ user, incrementValue, setValues }) => {
+  let maxRetries = 10; // Number of times to retry on conflict
+  let delay = 50; // Initial retry delay in ms
+  let lastError = null;
+
+  for (let attempt = 1; attempt <= maxRetries; attempt++) {
+    let currentBalanceDoc;
+    try {
+      // 1. Read the current document state
+      currentBalanceDoc = await Balance.findOne({ user }).lean();
+      const currentCredits = currentBalanceDoc ? currentBalanceDoc.tokenCredits : 0;
+
+      // 2. Calculate the desired new state
+      const potentialNewCredits = currentCredits + incrementValue;
+      const newCredits = Math.max(0, potentialNewCredits); // Ensure balance doesn't go below zero
+
+      // 3. Prepare the update payload
+      const updatePayload = {
+        $set: {
+          tokenCredits: newCredits,
+          ...(setValues || {}), // Merge other values to set
+        },
+      };
+
+      // 4. Attempt the conditional update or upsert
+      let updatedBalance = null;
+      if (currentBalanceDoc) {
+        // --- Document Exists: Perform Conditional Update ---
+        // Try to update only if the tokenCredits match the value we read (currentCredits)
+        updatedBalance = await Balance.findOneAndUpdate(
+          {
+            user: user,
+            tokenCredits: currentCredits, // Optimistic lock: condition based on the read value
+          },
+          updatePayload,
+          {
+            new: true, // Return the modified document
+            // lean: true, // .lean() is applied after query execution in Mongoose >= 6
+          },
+        ).lean(); // Use lean() for plain JS object
+
+        if (updatedBalance) {
+          // Success! The update was applied based on the expected current state.
+          return updatedBalance;
+        }
+        // If updatedBalance is null, it means tokenCredits changed between read and write (conflict).
+        lastError = new Error(`Concurrency conflict for user ${user} on attempt ${attempt}.`);
+        // Proceed to retry logic below.
+      } else {
+        // --- Document Does Not Exist: Perform Conditional Upsert ---
+        // Try to insert the document, but only if it still doesn't exist.
+        // Using tokenCredits: {$exists: false} helps prevent race conditions where
+        // another process creates the doc between our findOne and findOneAndUpdate.
+        try {
+          updatedBalance = await Balance.findOneAndUpdate(
+            {
+              user: user,
+              // Attempt to match only if the document doesn't exist OR was just created
+              // without tokenCredits (less likely but possible). A simple { user } filter
+              // might also work, relying on the retry for conflicts.
+              // Let's use a simpler filter and rely on retry for races.
+              // tokenCredits: { $exists: false } // This condition might be too strict if doc exists with 0 credits
+            },
+            updatePayload,
+            {
+              upsert: true, // Create if doesn't exist
+              new: true, // Return the created/updated document
+              // setDefaultsOnInsert: true, // Ensure schema defaults are applied on insert
+              // lean: true,
+            },
+          ).lean();
+
+          if (updatedBalance) {
+            // Upsert succeeded (likely created the document)
+            return updatedBalance;
+          }
+          // If null, potentially a rare race condition during upsert. Retry should handle it.
+          lastError = new Error(
+            `Upsert race condition suspected for user ${user} on attempt ${attempt}.`,
+          );
+        } catch (error) {
+          if (error.code === 11000) {
+            // E11000 duplicate key error on index
+            // This means another process created the document *just* before our upsert.
+            // It's a concurrency conflict during creation. We should retry.
+            lastError = error; // Store the error
+            // Proceed to retry logic below.
+          } else {
+            // Different error, rethrow
+            throw error;
+          }
+        }
+      } // End if/else (document exists?)
+    } catch (error) {
+      // Catch errors from findOne or unexpected findOneAndUpdate errors
+      logger.error(`[updateBalance] Error during attempt ${attempt} for user ${user}:`, error);
+      lastError = error; // Store the error
+      // Consider stopping retries for non-transient errors, but for now, we retry.
+    }
+
+    // If we reached here, it means the update failed (conflict or error), wait and retry
+    if (attempt < maxRetries) {
+      const jitter = Math.random() * delay * 0.5; // Add jitter to delay
+      await new Promise((resolve) => setTimeout(resolve, delay + jitter));
+      delay = Math.min(delay * 2, 2000); // Exponential backoff with cap
+    }
+  } // End for loop (retries)
+
+  // If loop finishes without success, throw the last encountered error or a generic one
+  logger.error(
+    `[updateBalance] Failed to update balance for user ${user} after ${maxRetries} attempts.`,
+  );
+  throw (
+    lastError ||
+    new Error(
+      `Failed to update balance for user ${user} after maximum retries due to persistent conflicts.`,
+    )
+  );
+};
+
+/** Method to calculate and set the tokenValue for a transaction */
+function calculateTokenValue(txn) {
+  const { valueKey, tokenType, model, endpointTokenConfig, inputTokenCount } = txn;
+  const multiplier = Math.abs(
+    getMultiplier({ valueKey, tokenType, model, endpointTokenConfig, inputTokenCount }),
+  );
+  txn.rate = multiplier;
+  txn.tokenValue = txn.rawAmount * multiplier;
+  if (txn.context && txn.tokenType === 'completion' && txn.context === 'incomplete') {
+    txn.tokenValue = Math.ceil(txn.tokenValue * cancelRate);
+    txn.rate *= cancelRate;
+  }
+}
+
+/**
+ * Creates an auto-refill transaction AND credits the user's balance, stamping `lastRefill`.
+ * @param {object} txData - Transaction data.
+ * @param {string} txData.user - The user ID.
+ * @param {string} txData.tokenType - The type of token.
+ * @param {string} txData.context - The context of the transaction.
+ * @param {number} txData.rawAmount - The raw amount of tokens.
+ * @returns {Promise<Object|undefined>} - Result with rate, user, balance, transaction; undefined if rawAmount is NaN.
+ */
+async function createAutoRefillTransaction(txData) {
+  if (txData.rawAmount != null && isNaN(txData.rawAmount)) {
+    return;
+  }
+  const transaction = new Transaction(txData);
+  transaction.endpointTokenConfig = txData.endpointTokenConfig;
+  transaction.inputTokenCount = txData.inputTokenCount;
+  calculateTokenValue(transaction);
+  await transaction.save();
+
+  const balanceResponse = await updateBalance({
+    user: transaction.user,
+    incrementValue: txData.rawAmount,
+    setValues: { lastRefill: new Date() },
+  });
+  const result = {
+    rate: transaction.rate,
+    user: transaction.user.toString(),
+    balance: balanceResponse.tokenCredits,
+  };
+  logger.debug('[Balance.check] Auto-refill performed', result);
+  result.transaction = transaction;
+  return result;
+}
+
+/**
+ * Static method to create a transaction and update the balance
+ * @param {Object} _txData - Transaction data; may carry `balance`/`transactions` feature-flag config.
+ */
+async function createTransaction(_txData) {
+  const { balance, transactions, ...txData } = _txData;
+  if (txData.rawAmount != null && isNaN(txData.rawAmount)) {
+    return;
+  }
+
+  if (transactions?.enabled === false) {
+    return;
+  }
+
+  const transaction = new Transaction(txData);
+  transaction.endpointTokenConfig = txData.endpointTokenConfig;
+  transaction.inputTokenCount = txData.inputTokenCount;
+  calculateTokenValue(transaction);
+
+  await transaction.save();
+  if (!balance?.enabled) {
+    return;
+  }
+
+  let incrementValue = transaction.tokenValue;
+  const balanceResponse = await updateBalance({
+    user: transaction.user,
+    incrementValue,
+  });
+
+  return {
+    rate: transaction.rate,
+    user: transaction.user.toString(),
+    balance: balanceResponse.tokenCredits,
+    [transaction.tokenType]: incrementValue,
+  };
+}
+
+/**
+ * Static method to create a structured transaction and update the balance
+ * @param {Object} _txData - Transaction data; may carry `balance`/`transactions` feature-flag config.
+ */
+async function createStructuredTransaction(_txData) {
+  const { balance, transactions, ...txData } = _txData;
+  if (transactions?.enabled === false) {
+    return;
+  }
+
+  const transaction = new Transaction(txData);
+  transaction.endpointTokenConfig = txData.endpointTokenConfig;
+  transaction.inputTokenCount = txData.inputTokenCount;
+
+  calculateStructuredTokenValue(transaction);
+
+  await transaction.save();
+
+  if (!balance?.enabled) {
+    return;
+  }
+
+  let incrementValue = transaction.tokenValue;
+
+  const balanceResponse = await updateBalance({
+    user: transaction.user,
+    incrementValue,
+  });
+
+  return {
+    rate: transaction.rate,
+    user: transaction.user.toString(),
+    balance: balanceResponse.tokenCredits,
+    [transaction.tokenType]: incrementValue,
+  };
+}
+
+/** Method to calculate token value for structured tokens */
+function calculateStructuredTokenValue(txn) {
+  if (!txn.tokenType) {
+    txn.tokenValue = txn.rawAmount;
+    return;
+  }
+
+  const { model, endpointTokenConfig, inputTokenCount } = txn;
+
+  if (txn.tokenType === 'prompt') {
+    const inputMultiplier = getMultiplier({
+      tokenType: 'prompt',
+      model,
+      endpointTokenConfig,
+      inputTokenCount,
+    });
+    const writeMultiplier =
+      getCacheMultiplier({ cacheType: 'write', model, endpointTokenConfig }) ?? inputMultiplier;
+    const readMultiplier =
+      getCacheMultiplier({ cacheType: 'read', model, endpointTokenConfig }) ?? inputMultiplier;
+
+    txn.rateDetail = {
+      input: inputMultiplier,
+      write: writeMultiplier,
+      read: readMultiplier,
+    };
+
+    const totalPromptTokens =
+      Math.abs(txn.inputTokens || 0) +
+      Math.abs(txn.writeTokens || 0) +
+      Math.abs(txn.readTokens || 0);
+
+    if (totalPromptTokens > 0) {
+      txn.rate =
+        (Math.abs(inputMultiplier * (txn.inputTokens || 0)) +
+          Math.abs(writeMultiplier * (txn.writeTokens || 0)) +
+          Math.abs(readMultiplier * (txn.readTokens || 0))) /
+        totalPromptTokens;
+    } else {
+      txn.rate = Math.abs(inputMultiplier); // Default to input rate if no tokens
+    }
+
+    txn.tokenValue = -(
+      Math.abs(txn.inputTokens || 0) * inputMultiplier +
+      Math.abs(txn.writeTokens || 0) * writeMultiplier +
+      Math.abs(txn.readTokens || 0) * readMultiplier
+    );
+
+    txn.rawAmount = -totalPromptTokens;
+  } else if (txn.tokenType === 'completion') {
+    const multiplier = getMultiplier({
+      tokenType: txn.tokenType,
+      model,
+      endpointTokenConfig,
+      inputTokenCount,
+    });
+    txn.rate = Math.abs(multiplier);
+    txn.tokenValue = -Math.abs(txn.rawAmount) * multiplier;
+    txn.rawAmount = -Math.abs(txn.rawAmount);
+  }
+
+  if (txn.context && txn.tokenType === 'completion' && txn.context === 'incomplete') {
+    txn.tokenValue = Math.ceil(txn.tokenValue * cancelRate);
+    txn.rate *= cancelRate;
+    if (txn.rateDetail) {
+      txn.rateDetail = Object.fromEntries(
+        Object.entries(txn.rateDetail).map(([k, v]) => [k, v * cancelRate]),
+      );
+    }
+  }
+}
+
+/**
+ * Queries and retrieves transactions based on a given filter.
+ * @async
+ * @function getTransactions
+ * @param {Object} filter - MongoDB filter object to apply when querying transactions.
+ * @returns {Promise<Array>} A promise that resolves to an array of matched transactions (lean).
+ * @throws {Error} Throws an error if querying the database fails.
+ */
+async function getTransactions(filter) {
+  try {
+    return await Transaction.find(filter).lean();
+  } catch (error) {
+    logger.error('Error querying transactions:', error);
+    throw error;
+  }
+}
+
+module.exports = {
+  getTransactions,
+  createTransaction,
+  createAutoRefillTransaction,
+  createStructuredTransaction,
+};
diff --git a/packages/data-schemas/src/methods/transaction.spec.ts b/api/models/Transaction.spec.js
similarity index 70%
rename from packages/data-schemas/src/methods/transaction.spec.ts
rename to api/models/Transaction.spec.js
index ee7df36c57..4b478d4dc3 100644
--- a/packages/data-schemas/src/methods/transaction.spec.ts
+++ b/api/models/Transaction.spec.js
@@ -1,63 +1,14 @@
-import mongoose from 'mongoose';
-import { MongoMemoryServer } from 'mongodb-memory-server';
-import type { ITransaction } from '~/schema/transaction';
-import type { TxData } from './transaction';
-import type { IBalance } from '..';
-import { createTxMethods, tokenValues, premiumTokenValues } from './tx';
-import { matchModelName, findMatchingPattern } from './test-helpers';
-import { createSpendTokensMethods } from './spendTokens';
-import { createTransactionMethods } from './transaction';
-import { createModels } from '~/models';
-
-jest.mock('~/config/winston', () => ({
- error: jest.fn(),
- warn: jest.fn(),
- info: jest.fn(),
- debug: jest.fn(),
-}));
-
-let mongoServer: InstanceType;
-let Balance: mongoose.Model;
-let Transaction: mongoose.Model;
-let spendTokens: ReturnType['spendTokens'];
-let spendStructuredTokens: ReturnType['spendStructuredTokens'];
-let createTransaction: ReturnType['createTransaction'];
-let createStructuredTransaction: ReturnType<
- typeof createTransactionMethods
->['createStructuredTransaction'];
-let getMultiplier: ReturnType['getMultiplier'];
-let getCacheMultiplier: ReturnType['getCacheMultiplier'];
+const mongoose = require('mongoose');
+const { MongoMemoryServer } = require('mongodb-memory-server');
+const { spendTokens, spendStructuredTokens } = require('./spendTokens');
+const { getMultiplier, getCacheMultiplier, premiumTokenValues, tokenValues } = require('./tx');
+const { createTransaction, createStructuredTransaction } = require('./Transaction');
+const { Balance, Transaction } = require('~/db/models');
+let mongoServer;
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
-
- // Register models
- const models = createModels(mongoose);
- Object.assign(mongoose.models, models);
-
- Balance = mongoose.models.Balance;
- Transaction = mongoose.models.Transaction;
-
- // Create methods from factories (following the chain in methods/index.ts)
- const txMethods = createTxMethods(mongoose, { matchModelName, findMatchingPattern });
- getMultiplier = txMethods.getMultiplier;
- getCacheMultiplier = txMethods.getCacheMultiplier;
-
- const transactionMethods = createTransactionMethods(mongoose, {
- getMultiplier: txMethods.getMultiplier,
- getCacheMultiplier: txMethods.getCacheMultiplier,
- });
- createTransaction = transactionMethods.createTransaction;
- createStructuredTransaction = transactionMethods.createStructuredTransaction;
-
- const spendMethods = createSpendTokensMethods(mongoose, {
- createTransaction: transactionMethods.createTransaction,
- createStructuredTransaction: transactionMethods.createStructuredTransaction,
- });
- spendTokens = spendMethods.spendTokens;
- spendStructuredTokens = spendMethods.spendStructuredTokens;
-
await mongoose.connect(mongoUri);
});
@@ -102,7 +53,7 @@ describe('Regular Token Spending Tests', () => {
const expectedTotalCost = 100 * promptMultiplier + 50 * completionMultiplier;
const expectedBalance = initialBalance - expectedTotalCost;
- expect(updatedBalance?.tokenCredits).toBeCloseTo(expectedBalance, 0);
+ expect(updatedBalance.tokenCredits).toBeCloseTo(expectedBalance, 0);
});
test('spendTokens should handle zero completion tokens', async () => {
@@ -133,7 +84,7 @@ describe('Regular Token Spending Tests', () => {
const updatedBalance = await Balance.findOne({ user: userId });
const promptMultiplier = getMultiplier({ model, tokenType: 'prompt' });
const expectedCost = 100 * promptMultiplier;
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
+ expect(updatedBalance.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
});
test('spendTokens should handle undefined token counts', async () => {
@@ -186,7 +137,7 @@ describe('Regular Token Spending Tests', () => {
const updatedBalance = await Balance.findOne({ user: userId });
const promptMultiplier = getMultiplier({ model, tokenType: 'prompt' });
const expectedCost = 100 * promptMultiplier;
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
+ expect(updatedBalance.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
});
test('spendTokens should not update balance when balance feature is disabled', async () => {
@@ -215,7 +166,7 @@ describe('Regular Token Spending Tests', () => {
// Assert: Balance should remain unchanged.
const updatedBalance = await Balance.findOne({ user: userId });
- expect(updatedBalance?.tokenCredits).toBe(initialBalance);
+ expect(updatedBalance.tokenCredits).toBe(initialBalance);
});
});
@@ -247,8 +198,8 @@ describe('Structured Token Spending Tests', () => {
const promptMultiplier = getMultiplier({ model, tokenType: 'prompt' });
const completionMultiplier = getMultiplier({ model, tokenType: 'completion' });
- const writeMultiplier = getCacheMultiplier({ model, cacheType: 'write' }) ?? promptMultiplier;
- const readMultiplier = getCacheMultiplier({ model, cacheType: 'read' }) ?? promptMultiplier;
+ const writeMultiplier = getCacheMultiplier({ model, cacheType: 'write' });
+ const readMultiplier = getCacheMultiplier({ model, cacheType: 'read' });
// Act
const result = await spendStructuredTokens(txData, tokenUsage);
@@ -263,18 +214,16 @@ describe('Structured Token Spending Tests', () => {
const expectedBalance = initialBalance - expectedTotalCost;
// Assert
- expect(result?.completion?.balance).toBeLessThan(initialBalance);
+ expect(result.completion.balance).toBeLessThan(initialBalance);
const allowedDifference = 100;
- expect(Math.abs((result?.completion?.balance ?? 0) - expectedBalance)).toBeLessThan(
- allowedDifference,
- );
- const balanceDecrease = initialBalance - (result?.completion?.balance ?? 0);
+ expect(Math.abs(result.completion.balance - expectedBalance)).toBeLessThan(allowedDifference);
+ const balanceDecrease = initialBalance - result.completion.balance;
expect(balanceDecrease).toBeCloseTo(expectedTotalCost, 0);
const expectedPromptTokenValue = -expectedPromptCost;
const expectedCompletionTokenValue = -expectedCompletionCost;
- expect(result?.prompt?.prompt).toBeCloseTo(expectedPromptTokenValue, 1);
- expect(result?.completion?.completion).toBe(expectedCompletionTokenValue);
+ expect(result.prompt.prompt).toBeCloseTo(expectedPromptTokenValue, 1);
+ expect(result.completion.completion).toBe(expectedCompletionTokenValue);
});
test('should handle zero completion tokens in structured spending', async () => {
@@ -307,7 +256,7 @@ describe('Structured Token Spending Tests', () => {
// Assert
expect(result.prompt).toBeDefined();
expect(result.completion).toBeUndefined();
- expect(result?.prompt?.prompt).toBeLessThan(0);
+ expect(result.prompt.prompt).toBeLessThan(0);
});
test('should handle only prompt tokens in structured spending', async () => {
@@ -339,7 +288,7 @@ describe('Structured Token Spending Tests', () => {
// Assert
expect(result.prompt).toBeDefined();
expect(result.completion).toBeUndefined();
- expect(result?.prompt?.prompt).toBeLessThan(0);
+ expect(result.prompt.prompt).toBeLessThan(0);
});
test('should handle undefined token counts in structured spending', async () => {
@@ -398,7 +347,7 @@ describe('Structured Token Spending Tests', () => {
// Assert:
// (Assuming a multiplier for completion of 15 and a cancel rate of 1.15 as noted in the original test.)
- expect(result?.completion?.completion).toBeCloseTo(-50 * 15 * 1.15, 0);
+ expect(result.completion.completion).toBeCloseTo(-50 * 15 * 1.15, 0);
});
});
@@ -410,7 +359,7 @@ describe('NaN Handling Tests', () => {
await Balance.create({ user: userId, tokenCredits: initialBalance });
const model = 'gpt-3.5-turbo';
- const txData: TxData = {
+ const txData = {
user: userId,
conversationId: 'test-conversation-id',
model,
@@ -427,7 +376,7 @@ describe('NaN Handling Tests', () => {
// Assert: No transaction should be created and balance remains unchanged.
expect(result).toBeUndefined();
const balance = await Balance.findOne({ user: userId });
- expect(balance?.tokenCredits).toBe(initialBalance);
+ expect(balance.tokenCredits).toBe(initialBalance);
});
});
@@ -439,7 +388,7 @@ describe('Transactions Config Tests', () => {
await Balance.create({ user: userId, tokenCredits: initialBalance });
const model = 'gpt-3.5-turbo';
- const txData: TxData = {
+ const txData = {
user: userId,
conversationId: 'test-conversation-id',
model,
@@ -458,7 +407,7 @@ describe('Transactions Config Tests', () => {
const transactions = await Transaction.find({ user: userId });
expect(transactions).toHaveLength(0);
const balance = await Balance.findOne({ user: userId });
- expect(balance?.tokenCredits).toBe(initialBalance);
+ expect(balance.tokenCredits).toBe(initialBalance);
});
test('createTransaction should save when transactions.enabled is true', async () => {
@@ -468,7 +417,7 @@ describe('Transactions Config Tests', () => {
await Balance.create({ user: userId, tokenCredits: initialBalance });
const model = 'gpt-3.5-turbo';
- const txData: TxData = {
+ const txData = {
user: userId,
conversationId: 'test-conversation-id',
model,
@@ -485,7 +434,7 @@ describe('Transactions Config Tests', () => {
// Assert: Transaction should be created
expect(result).toBeDefined();
- expect(result?.balance).toBeLessThan(initialBalance);
+ expect(result.balance).toBeLessThan(initialBalance);
const transactions = await Transaction.find({ user: userId });
expect(transactions).toHaveLength(1);
expect(transactions[0].rawAmount).toBe(-100);
@@ -498,7 +447,7 @@ describe('Transactions Config Tests', () => {
await Balance.create({ user: userId, tokenCredits: initialBalance });
const model = 'gpt-3.5-turbo';
- const txData: TxData = {
+ const txData = {
user: userId,
conversationId: 'test-conversation-id',
model,
@@ -515,7 +464,7 @@ describe('Transactions Config Tests', () => {
// Assert: Transaction should be created (backward compatibility)
expect(result).toBeDefined();
- expect(result?.balance).toBeLessThan(initialBalance);
+ expect(result.balance).toBeLessThan(initialBalance);
const transactions = await Transaction.find({ user: userId });
expect(transactions).toHaveLength(1);
});
@@ -527,7 +476,7 @@ describe('Transactions Config Tests', () => {
await Balance.create({ user: userId, tokenCredits: initialBalance });
const model = 'gpt-3.5-turbo';
- const txData: TxData = {
+ const txData = {
user: userId,
conversationId: 'test-conversation-id',
model,
@@ -548,7 +497,7 @@ describe('Transactions Config Tests', () => {
expect(transactions).toHaveLength(1);
expect(transactions[0].rawAmount).toBe(-100);
const balance = await Balance.findOne({ user: userId });
- expect(balance?.tokenCredits).toBe(initialBalance);
+ expect(balance.tokenCredits).toBe(initialBalance);
});
test('createStructuredTransaction should not save when transactions.enabled is false', async () => {
@@ -558,7 +507,7 @@ describe('Transactions Config Tests', () => {
await Balance.create({ user: userId, tokenCredits: initialBalance });
const model = 'claude-3-5-sonnet';
- const txData: TxData = {
+ const txData = {
user: userId,
conversationId: 'test-conversation-id',
model,
@@ -578,7 +527,7 @@ describe('Transactions Config Tests', () => {
const transactions = await Transaction.find({ user: userId });
expect(transactions).toHaveLength(0);
const balance = await Balance.findOne({ user: userId });
- expect(balance?.tokenCredits).toBe(initialBalance);
+ expect(balance.tokenCredits).toBe(initialBalance);
});
test('createStructuredTransaction should save transaction but not update balance when balance is disabled but transactions enabled', async () => {
@@ -588,7 +537,7 @@ describe('Transactions Config Tests', () => {
await Balance.create({ user: userId, tokenCredits: initialBalance });
const model = 'claude-3-5-sonnet';
- const txData: TxData = {
+ const txData = {
user: userId,
conversationId: 'test-conversation-id',
model,
@@ -612,7 +561,7 @@ describe('Transactions Config Tests', () => {
expect(transactions[0].writeTokens).toBe(-100);
expect(transactions[0].readTokens).toBe(-5);
const balance = await Balance.findOne({ user: userId });
- expect(balance?.tokenCredits).toBe(initialBalance);
+ expect(balance.tokenCredits).toBe(initialBalance);
});
});
@@ -636,11 +585,11 @@ describe('calculateTokenValue Edge Cases', () => {
});
const expectedRate = getMultiplier({ model, tokenType: 'prompt' });
- expect(result?.rate).toBe(expectedRate);
+ expect(result.rate).toBe(expectedRate);
const tx = await Transaction.findOne({ user: userId });
- expect(tx?.tokenValue).toBe(-promptTokens * expectedRate);
- expect(tx?.rate).toBe(expectedRate);
+ expect(tx.tokenValue).toBe(-promptTokens * expectedRate);
+ expect(tx.rate).toBe(expectedRate);
});
test('should derive valueKey and apply correct rate for an unknown model with tokenType', async () => {
@@ -659,9 +608,9 @@ describe('calculateTokenValue Edge Cases', () => {
});
const tx = await Transaction.findOne({ user: userId });
- expect(tx?.rate).toBeDefined();
- expect(tx?.rate).toBeGreaterThan(0);
- expect(tx?.tokenValue).toBe((tx?.rawAmount ?? 0) * (tx?.rate ?? 0));
+ expect(tx.rate).toBeDefined();
+ expect(tx.rate).toBeGreaterThan(0);
+ expect(tx.tokenValue).toBe(tx.rawAmount * tx.rate);
});
test('should correctly apply model-derived multiplier without valueKey for completion', async () => {
@@ -684,10 +633,10 @@ describe('calculateTokenValue Edge Cases', () => {
const expectedRate = getMultiplier({ model, tokenType: 'completion' });
expect(expectedRate).toBe(tokenValues[model].completion);
- expect(result?.rate).toBe(expectedRate);
+ expect(result.rate).toBe(expectedRate);
const updatedBalance = await Balance.findOne({ user: userId });
- expect(updatedBalance?.tokenCredits).toBeCloseTo(
+ expect(updatedBalance.tokenCredits).toBeCloseTo(
initialBalance - completionTokens * expectedRate,
0,
);
@@ -721,7 +670,7 @@ describe('Premium Token Pricing Integration Tests', () => {
promptTokens * standardPromptRate + completionTokens * standardCompletionRate;
const updatedBalance = await Balance.findOne({ user: userId });
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
+ expect(updatedBalance.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
});
test('spendTokens should apply premium pricing when prompt tokens exceed premium threshold', async () => {
@@ -750,7 +699,7 @@ describe('Premium Token Pricing Integration Tests', () => {
promptTokens * premiumPromptRate + completionTokens * premiumCompletionRate;
const updatedBalance = await Balance.findOne({ user: userId });
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
+ expect(updatedBalance.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
});
test('spendTokens should apply standard pricing at exactly the premium threshold', async () => {
@@ -779,7 +728,7 @@ describe('Premium Token Pricing Integration Tests', () => {
promptTokens * standardPromptRate + completionTokens * standardCompletionRate;
const updatedBalance = await Balance.findOne({ user: userId });
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
+ expect(updatedBalance.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
});
test('spendStructuredTokens should apply premium pricing when total input tokens exceed threshold', async () => {
@@ -813,13 +762,8 @@ describe('Premium Token Pricing Integration Tests', () => {
const premiumPromptRate = premiumTokenValues[model].prompt;
const premiumCompletionRate = premiumTokenValues[model].completion;
- const promptMultiplier = getMultiplier({
- model,
- tokenType: 'prompt',
- inputTokenCount: totalInput,
- });
- const writeMultiplier = getCacheMultiplier({ model, cacheType: 'write' }) ?? promptMultiplier;
- const readMultiplier = getCacheMultiplier({ model, cacheType: 'read' }) ?? promptMultiplier;
+ const writeMultiplier = getCacheMultiplier({ model, cacheType: 'write' });
+ const readMultiplier = getCacheMultiplier({ model, cacheType: 'read' });
const expectedPromptCost =
tokenUsage.promptTokens.input * premiumPromptRate +
@@ -830,7 +774,7 @@ describe('Premium Token Pricing Integration Tests', () => {
const updatedBalance = await Balance.findOne({ user: userId });
expect(totalInput).toBeGreaterThan(premiumTokenValues[model].threshold);
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedTotalCost, 0);
+ expect(updatedBalance.tokenCredits).toBeCloseTo(initialBalance - expectedTotalCost, 0);
});
test('spendStructuredTokens should apply standard pricing when total input tokens are below threshold', async () => {
@@ -864,13 +808,8 @@ describe('Premium Token Pricing Integration Tests', () => {
const standardPromptRate = tokenValues[model].prompt;
const standardCompletionRate = tokenValues[model].completion;
- const promptMultiplier = getMultiplier({
- model,
- tokenType: 'prompt',
- inputTokenCount: totalInput,
- });
- const writeMultiplier = getCacheMultiplier({ model, cacheType: 'write' }) ?? promptMultiplier;
- const readMultiplier = getCacheMultiplier({ model, cacheType: 'read' }) ?? promptMultiplier;
+ const writeMultiplier = getCacheMultiplier({ model, cacheType: 'write' });
+ const readMultiplier = getCacheMultiplier({ model, cacheType: 'read' });
const expectedPromptCost =
tokenUsage.promptTokens.input * standardPromptRate +
@@ -881,145 +820,7 @@ describe('Premium Token Pricing Integration Tests', () => {
const updatedBalance = await Balance.findOne({ user: userId });
expect(totalInput).toBeLessThanOrEqual(premiumTokenValues[model].threshold);
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedTotalCost, 0);
- });
-
- test('spendTokens should apply standard pricing for gemini-3.1-pro-preview below threshold', async () => {
- const userId = new mongoose.Types.ObjectId();
- const initialBalance = 100000000;
- await Balance.create({ user: userId, tokenCredits: initialBalance });
-
- const model = 'gemini-3.1-pro-preview';
- const promptTokens = 100000;
- const completionTokens = 500;
-
- const txData = {
- user: userId,
- conversationId: 'test-gemini31-below',
- model,
- context: 'test',
- endpointTokenConfig: null,
- balance: { enabled: true },
- };
-
- await spendTokens(txData, { promptTokens, completionTokens });
-
- const standardPromptRate = tokenValues['gemini-3.1'].prompt;
- const standardCompletionRate = tokenValues['gemini-3.1'].completion;
- const expectedCost =
- promptTokens * standardPromptRate + completionTokens * standardCompletionRate;
-
- const updatedBalance = await Balance.findOne({ user: userId });
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
- });
-
- test('spendTokens should apply premium pricing for gemini-3.1-pro-preview above threshold', async () => {
- const userId = new mongoose.Types.ObjectId();
- const initialBalance = 100000000;
- await Balance.create({ user: userId, tokenCredits: initialBalance });
-
- const model = 'gemini-3.1-pro-preview';
- const promptTokens = 250000;
- const completionTokens = 500;
-
- const txData = {
- user: userId,
- conversationId: 'test-gemini31-above',
- model,
- context: 'test',
- endpointTokenConfig: null,
- balance: { enabled: true },
- };
-
- await spendTokens(txData, { promptTokens, completionTokens });
-
- const premiumPromptRate = premiumTokenValues['gemini-3.1'].prompt;
- const premiumCompletionRate = premiumTokenValues['gemini-3.1'].completion;
- const expectedCost =
- promptTokens * premiumPromptRate + completionTokens * premiumCompletionRate;
-
- const updatedBalance = await Balance.findOne({ user: userId });
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
- });
-
- test('spendTokens should apply standard pricing for gemini-3.1-pro-preview at exactly the threshold', async () => {
- const userId = new mongoose.Types.ObjectId();
- const initialBalance = 100000000;
- await Balance.create({ user: userId, tokenCredits: initialBalance });
-
- const model = 'gemini-3.1-pro-preview';
- const promptTokens = premiumTokenValues['gemini-3.1'].threshold;
- const completionTokens = 500;
-
- const txData = {
- user: userId,
- conversationId: 'test-gemini31-exact',
- model,
- context: 'test',
- endpointTokenConfig: null,
- balance: { enabled: true },
- };
-
- await spendTokens(txData, { promptTokens, completionTokens });
-
- const standardPromptRate = tokenValues['gemini-3.1'].prompt;
- const standardCompletionRate = tokenValues['gemini-3.1'].completion;
- const expectedCost =
- promptTokens * standardPromptRate + completionTokens * standardCompletionRate;
-
- const updatedBalance = await Balance.findOne({ user: userId });
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
- });
-
- test('spendStructuredTokens should apply premium pricing for gemini-3.1 when total input exceeds threshold', async () => {
- const userId = new mongoose.Types.ObjectId();
- const initialBalance = 100000000;
- await Balance.create({ user: userId, tokenCredits: initialBalance });
-
- const model = 'gemini-3.1-pro-preview';
- const txData = {
- user: userId,
- conversationId: 'test-gemini31-structured-premium',
- model,
- context: 'message',
- endpointTokenConfig: null,
- balance: { enabled: true },
- };
-
- const tokenUsage = {
- promptTokens: {
- input: 200000,
- write: 10000,
- read: 5000,
- },
- completionTokens: 1000,
- };
-
- const totalInput =
- tokenUsage.promptTokens.input + tokenUsage.promptTokens.write + tokenUsage.promptTokens.read;
-
- await spendStructuredTokens(txData, tokenUsage);
-
- const premiumPromptRate = premiumTokenValues['gemini-3.1'].prompt;
- const premiumCompletionRate = premiumTokenValues['gemini-3.1'].completion;
- const promptMultiplier = getMultiplier({
- model,
- tokenType: 'prompt',
- inputTokenCount: totalInput,
- });
- const writeMultiplier = getCacheMultiplier({ model, cacheType: 'write' }) ?? promptMultiplier;
- const readMultiplier = getCacheMultiplier({ model, cacheType: 'read' }) ?? promptMultiplier;
-
- const expectedPromptCost =
- tokenUsage.promptTokens.input * premiumPromptRate +
- tokenUsage.promptTokens.write * writeMultiplier +
- tokenUsage.promptTokens.read * readMultiplier;
- const expectedCompletionCost = tokenUsage.completionTokens * premiumCompletionRate;
- const expectedTotalCost = expectedPromptCost + expectedCompletionCost;
-
- const updatedBalance = await Balance.findOne({ user: userId });
- expect(totalInput).toBeGreaterThan(premiumTokenValues['gemini-3.1'].threshold);
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedTotalCost, 0);
+ expect(updatedBalance.tokenCredits).toBeCloseTo(initialBalance - expectedTotalCost, 0);
});
test('non-premium models should not be affected by inputTokenCount regardless of prompt size', async () => {
@@ -1048,6 +849,6 @@ describe('Premium Token Pricing Integration Tests', () => {
promptTokens * standardPromptRate + completionTokens * standardCompletionRate;
const updatedBalance = await Balance.findOne({ user: userId });
- expect(updatedBalance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
+ expect(updatedBalance.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
});
});
diff --git a/api/models/balanceMethods.js b/api/models/balanceMethods.js
new file mode 100644
index 0000000000..e614872eac
--- /dev/null
+++ b/api/models/balanceMethods.js
@@ -0,0 +1,156 @@
+const { logger } = require('@librechat/data-schemas');
+const { ViolationTypes } = require('librechat-data-provider');
+const { createAutoRefillTransaction } = require('./Transaction');
+const { logViolation } = require('~/cache');
+const { getMultiplier } = require('./tx');
+const { Balance } = require('~/db/models');
+
+function isInvalidDate(date) {
+ return isNaN(date);
+}
+
+/**
+ * Simple check method that calculates token cost and returns balance info.
+ * The auto-refill logic has been moved to balanceMethods.js to prevent circular dependencies.
+ */
+const checkBalanceRecord = async function ({
+ user,
+ model,
+ endpoint,
+ valueKey,
+ tokenType,
+ amount,
+ endpointTokenConfig,
+}) {
+ const multiplier = getMultiplier({ valueKey, tokenType, model, endpoint, endpointTokenConfig });
+ const tokenCost = amount * multiplier;
+
+ // Retrieve the balance record
+ let record = await Balance.findOne({ user }).lean();
+ if (!record) {
+ logger.debug('[Balance.check] No balance record found for user', { user });
+ return {
+ canSpend: false,
+ balance: 0,
+ tokenCost,
+ };
+ }
+ let balance = record.tokenCredits;
+
+ logger.debug('[Balance.check] Initial state', {
+ user,
+ model,
+ endpoint,
+ valueKey,
+ tokenType,
+ amount,
+ balance,
+ multiplier,
+ endpointTokenConfig: !!endpointTokenConfig,
+ });
+
+ // Only perform auto-refill if spending would bring the balance to 0 or below
+ if (balance - tokenCost <= 0 && record.autoRefillEnabled && record.refillAmount > 0) {
+ const lastRefillDate = new Date(record.lastRefill);
+ const now = new Date();
+ if (
+ isInvalidDate(lastRefillDate) ||
+ now >=
+ addIntervalToDate(lastRefillDate, record.refillIntervalValue, record.refillIntervalUnit)
+ ) {
+ try {
+ /** @type {{ rate: number, user: string, balance: number, transaction: import('@librechat/data-schemas').ITransaction}} */
+ const result = await createAutoRefillTransaction({
+ user: user,
+ tokenType: 'credits',
+ context: 'autoRefill',
+ rawAmount: record.refillAmount,
+ });
+ balance = result.balance;
+ } catch (error) {
+ logger.error('[Balance.check] Failed to record transaction for auto-refill', error);
+ }
+ }
+ }
+
+ logger.debug('[Balance.check] Token cost', { tokenCost });
+ return { canSpend: balance >= tokenCost, balance, tokenCost };
+};
+
+/**
+ * Adds a time interval to a given date.
+ * @param {Date} date - The starting date.
+ * @param {number} value - The numeric value of the interval.
+ * @param {'seconds'|'minutes'|'hours'|'days'|'weeks'|'months'} unit - The unit of time.
+ * @returns {Date} A new Date representing the starting date plus the interval.
+ */
+const addIntervalToDate = (date, value, unit) => {
+ const result = new Date(date);
+ switch (unit) {
+ case 'seconds':
+ result.setSeconds(result.getSeconds() + value);
+ break;
+ case 'minutes':
+ result.setMinutes(result.getMinutes() + value);
+ break;
+ case 'hours':
+ result.setHours(result.getHours() + value);
+ break;
+ case 'days':
+ result.setDate(result.getDate() + value);
+ break;
+ case 'weeks':
+ result.setDate(result.getDate() + value * 7);
+ break;
+ case 'months':
+ result.setMonth(result.getMonth() + value);
+ break;
+ default:
+ break;
+ }
+ return result;
+};
+
+/**
+ * Checks the balance for a user and determines if they can spend a certain amount.
+ * If the user cannot spend the amount, it logs a violation and denies the request.
+ *
+ * @async
+ * @function
+ * @param {Object} params - The function parameters.
+ * @param {ServerRequest} params.req - The Express request object.
+ * @param {Express.Response} params.res - The Express response object.
+ * @param {Object} params.txData - The transaction data.
+ * @param {string} params.txData.user - The user ID or identifier.
+ * @param {('prompt' | 'completion')} params.txData.tokenType - The type of token.
+ * @param {number} params.txData.amount - The amount of tokens.
+ * @param {string} params.txData.model - The model name or identifier.
+ * @param {string} [params.txData.endpointTokenConfig] - The token configuration for the endpoint.
+ * @returns {Promise} Throws error if the user cannot spend the amount.
+ * @throws {Error} Throws an error if there's an issue with the balance check.
+ */
+const checkBalance = async ({ req, res, txData }) => {
+ const { canSpend, balance, tokenCost } = await checkBalanceRecord(txData);
+ if (canSpend) {
+ return true;
+ }
+
+ const type = ViolationTypes.TOKEN_BALANCE;
+ const errorMessage = {
+ type,
+ balance,
+ tokenCost,
+ promptTokens: txData.amount,
+ };
+
+ if (txData.generations && txData.generations.length > 0) {
+ errorMessage.generations = txData.generations;
+ }
+
+ await logViolation(req, res, type, errorMessage, 0);
+ throw new Error(JSON.stringify(errorMessage));
+};
+
+module.exports = {
+ checkBalance,
+};
diff --git a/packages/data-schemas/src/methods/convoStructure.spec.ts b/api/models/convoStructure.spec.js
similarity index 69%
rename from packages/data-schemas/src/methods/convoStructure.spec.ts
rename to api/models/convoStructure.spec.js
index 77a9913233..440f21cb06 100644
--- a/packages/data-schemas/src/methods/convoStructure.spec.ts
+++ b/api/models/convoStructure.spec.js
@@ -1,35 +1,13 @@
-import mongoose from 'mongoose';
-import type { TMessage } from 'librechat-data-provider';
-import { buildTree } from 'librechat-data-provider';
-import { MongoMemoryServer } from 'mongodb-memory-server';
-import { createModels } from '~/models';
-import { createMessageMethods } from './message';
-import type { IMessage } from '..';
-
-jest.mock('~/config/winston', () => ({
- error: jest.fn(),
- warn: jest.fn(),
- info: jest.fn(),
- debug: jest.fn(),
-}));
-
-let mongod: InstanceType<typeof MongoMemoryServer>;
-let Message: mongoose.Model<IMessage>;
-let getMessages: ReturnType<typeof createMessageMethods>['getMessages'];
-let bulkSaveMessages: ReturnType<typeof createMessageMethods>['bulkSaveMessages'];
+const mongoose = require('mongoose');
+const { buildTree } = require('librechat-data-provider');
+const { MongoMemoryServer } = require('mongodb-memory-server');
+const { getMessages, bulkSaveMessages } = require('./Message');
+const { Message } = require('~/db/models');
+let mongod;
beforeAll(async () => {
mongod = await MongoMemoryServer.create();
const uri = mongod.getUri();
-
- const models = createModels(mongoose);
- Object.assign(mongoose.models, models);
- Message = mongoose.models.Message;
-
- const methods = createMessageMethods(mongoose);
- getMessages = methods.getMessages;
- bulkSaveMessages = methods.bulkSaveMessages;
-
await mongoose.connect(uri);
});
@@ -83,13 +61,11 @@ describe('Conversation Structure Tests', () => {
// Add common properties to all messages
messages.forEach((msg) => {
- Object.assign(msg, {
- conversationId,
- user: userId,
- isCreatedByUser: false,
- error: false,
- unfinished: false,
- });
+ msg.conversationId = conversationId;
+ msg.user = userId;
+ msg.isCreatedByUser = false;
+ msg.error = false;
+ msg.unfinished = false;
});
// Save messages with overrideTimestamp omitted (default is false)
@@ -99,10 +75,10 @@ describe('Conversation Structure Tests', () => {
const retrievedMessages = await getMessages({ conversationId, user: userId });
// Build tree
- const tree = buildTree({ messages: retrievedMessages as TMessage[] });
+ const tree = buildTree({ messages: retrievedMessages });
// Check if the tree is incorrect (folded/corrupted)
- expect(tree!.length).toBeGreaterThan(1); // Should have multiple root messages, indicating corruption
+ expect(tree.length).toBeGreaterThan(1); // Should have multiple root messages, indicating corruption
});
test('Fix: Conversation structure maintained with more than 16 messages', async () => {
@@ -126,17 +102,17 @@ describe('Conversation Structure Tests', () => {
const retrievedMessages = await getMessages({ conversationId, user: userId });
// Build tree
- const tree = buildTree({ messages: retrievedMessages as TMessage[] });
+ const tree = buildTree({ messages: retrievedMessages });
// Check if the tree is correct
- expect(tree!.length).toBe(1); // Should have only one root message
- let currentNode = tree![0];
+ expect(tree.length).toBe(1); // Should have only one root message
+ let currentNode = tree[0];
for (let i = 1; i < 20; i++) {
- expect(currentNode.children!.length).toBe(1);
- currentNode = currentNode.children![0];
+ expect(currentNode.children.length).toBe(1);
+ currentNode = currentNode.children[0];
expect(currentNode.text).toBe(`Message ${i}`);
}
- expect(currentNode.children!.length).toBe(0); // Last message should have no children
+ expect(currentNode.children.length).toBe(0); // Last message should have no children
});
test('Simulate MongoDB ordering issue with more than 16 messages and close timestamps', async () => {
@@ -155,13 +131,15 @@ describe('Conversation Structure Tests', () => {
// Add common properties to all messages
messages.forEach((msg) => {
- Object.assign(msg, { isCreatedByUser: false, error: false, unfinished: false });
+ msg.isCreatedByUser = false;
+ msg.error = false;
+ msg.unfinished = false;
});
await bulkSaveMessages(messages, true);
const retrievedMessages = await getMessages({ conversationId, user: userId });
- const tree = buildTree({ messages: retrievedMessages as TMessage[] });
- expect(tree!.length).toBeGreaterThan(1);
+ const tree = buildTree({ messages: retrievedMessages });
+ expect(tree.length).toBeGreaterThan(1);
});
test('Fix: Preserve order with more than 16 messages by maintaining original timestamps', async () => {
@@ -180,7 +158,9 @@ describe('Conversation Structure Tests', () => {
// Add common properties to all messages
messages.forEach((msg) => {
- Object.assign(msg, { isCreatedByUser: false, error: false, unfinished: false });
+ msg.isCreatedByUser = false;
+ msg.error = false;
+ msg.unfinished = false;
});
// Save messages with overriding timestamps (preserve original timestamps)
@@ -190,17 +170,17 @@ describe('Conversation Structure Tests', () => {
const retrievedMessages = await getMessages({ conversationId, user: userId });
// Build tree
- const tree = buildTree({ messages: retrievedMessages as TMessage[] });
+ const tree = buildTree({ messages: retrievedMessages });
// Check if the tree is correct
- expect(tree!.length).toBe(1); // Should have only one root message
- let currentNode = tree![0];
+ expect(tree.length).toBe(1); // Should have only one root message
+ let currentNode = tree[0];
for (let i = 1; i < 20; i++) {
- expect(currentNode.children!.length).toBe(1);
- currentNode = currentNode.children![0];
+ expect(currentNode.children.length).toBe(1);
+ currentNode = currentNode.children[0];
expect(currentNode.text).toBe(`Message ${i}`);
}
- expect(currentNode.children!.length).toBe(0); // Last message should have no children
+ expect(currentNode.children.length).toBe(0); // Last message should have no children
});
test('Random order dates between parent and children messages', async () => {
@@ -237,13 +217,11 @@ describe('Conversation Structure Tests', () => {
// Add common properties to all messages
messages.forEach((msg) => {
- Object.assign(msg, {
- conversationId,
- user: userId,
- isCreatedByUser: false,
- error: false,
- unfinished: false,
- });
+ msg.conversationId = conversationId;
+ msg.user = userId;
+ msg.isCreatedByUser = false;
+ msg.error = false;
+ msg.unfinished = false;
});
// Save messages with overrideTimestamp set to true
@@ -263,16 +241,16 @@ describe('Conversation Structure Tests', () => {
);
// Build tree
- const tree = buildTree({ messages: retrievedMessages as TMessage[] });
+ const tree = buildTree({ messages: retrievedMessages });
// Debug log to see the tree structure
console.log(
'Tree structure:',
- tree!.map((root) => ({
+ tree.map((root) => ({
messageId: root.messageId,
- children: root.children!.map((child) => ({
+ children: root.children.map((child) => ({
messageId: child.messageId,
- children: child.children!.map((grandchild) => ({
+ children: child.children.map((grandchild) => ({
messageId: grandchild.messageId,
})),
})),
@@ -284,14 +262,14 @@ describe('Conversation Structure Tests', () => {
// Check if messages are properly linked
const parentMsg = retrievedMessages.find((msg) => msg.messageId === 'parent');
- expect(parentMsg!.parentMessageId).toBeNull(); // Parent should have null parentMessageId
+ expect(parentMsg.parentMessageId).toBeNull(); // Parent should have null parentMessageId
const childMsg1 = retrievedMessages.find((msg) => msg.messageId === 'child1');
- expect(childMsg1!.parentMessageId).toBe('parent');
+ expect(childMsg1.parentMessageId).toBe('parent');
// Then check tree structure
- expect(tree!.length).toBe(1); // Should have only one root message
- expect(tree![0].messageId).toBe('parent');
- expect(tree![0].children!.length).toBe(2); // Should have two children
+ expect(tree.length).toBe(1); // Should have only one root message
+ expect(tree[0].messageId).toBe('parent');
+ expect(tree[0].children.length).toBe(2); // Should have two children
});
});
diff --git a/api/models/index.js b/api/models/index.js
index 2a1cb222f9..d0b10be079 100644
--- a/api/models/index.js
+++ b/api/models/index.js
@@ -1,22 +1,48 @@
const mongoose = require('mongoose');
const { createMethods } = require('@librechat/data-schemas');
-const { matchModelName, findMatchingPattern } = require('@librechat/api');
-const getLogStores = require('~/cache/getLogStores');
-
-const methods = createMethods(mongoose, {
- matchModelName,
- findMatchingPattern,
- getCache: getLogStores,
-});
+const methods = createMethods(mongoose);
+const { comparePassword } = require('./userMethods');
+const {
+ getMessage,
+ getMessages,
+ saveMessage,
+ recordMessage,
+ updateMessage,
+ deleteMessagesSince,
+ deleteMessages,
+} = require('./Message');
+const { getConvoTitle, getConvo, saveConvo, deleteConvos } = require('./Conversation');
+const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset');
+const { File } = require('~/db/models');
const seedDatabase = async () => {
await methods.initializeRoles();
await methods.seedDefaultRoles();
await methods.ensureDefaultCategories();
- await methods.seedSystemGrants();
};
module.exports = {
...methods,
seedDatabase,
+ comparePassword,
+
+ getMessage,
+ getMessages,
+ saveMessage,
+ recordMessage,
+ updateMessage,
+ deleteMessagesSince,
+ deleteMessages,
+
+ getConvoTitle,
+ getConvo,
+ saveConvo,
+ deleteConvos,
+
+ getPreset,
+ getPresets,
+ savePreset,
+ deletePresets,
+
+ Files: File,
};
diff --git a/api/models/interface.js b/api/models/interface.js
new file mode 100644
index 0000000000..a79a8e747f
--- /dev/null
+++ b/api/models/interface.js
@@ -0,0 +1,24 @@
+const { logger } = require('@librechat/data-schemas');
+const { updateInterfacePermissions: updateInterfacePerms } = require('@librechat/api');
+const { getRoleByName, updateAccessPermissions } = require('./Role');
+
+/**
+ * Update interface permissions based on app configuration.
+ * Must be done independently from loading the app config.
+ * @param {AppConfig} appConfig
+ */
+async function updateInterfacePermissions(appConfig) {
+ try {
+ await updateInterfacePerms({
+ appConfig,
+ getRoleByName,
+ updateAccessPermissions,
+ });
+ } catch (error) {
+ logger.error('Error updating interface permissions:', error);
+ }
+}
+
+module.exports = {
+ updateInterfacePermissions,
+};
diff --git a/api/models/inviteUser.js b/api/models/inviteUser.js
new file mode 100644
index 0000000000..eda8394225
--- /dev/null
+++ b/api/models/inviteUser.js
@@ -0,0 +1,68 @@
+const mongoose = require('mongoose');
+const { logger, hashToken, getRandomValues } = require('@librechat/data-schemas');
+const { createToken, findToken } = require('~/models');
+
+/**
+ * @module inviteUser
+ * @description This module provides functions to create and get user invites
+ */
+
+/**
+ * @function createInvite
+ * @description This function creates a new user invite
+ * @param {string} email - The email of the user to invite
+ * @returns {Promise} A promise that resolves to the saved invite document
+ * @throws {Error} If there is an error creating the invite
+ */
+const createInvite = async (email) => {
+ try {
+ const token = await getRandomValues(32);
+ const hash = await hashToken(token);
+ const encodedToken = encodeURIComponent(token);
+
+ const fakeUserId = new mongoose.Types.ObjectId();
+
+ await createToken({
+ userId: fakeUserId,
+ email,
+ token: hash,
+ createdAt: Date.now(),
+ expiresIn: 604800,
+ });
+
+ return encodedToken;
+ } catch (error) {
+ logger.error('[createInvite] Error creating invite', error);
+ return { message: 'Error creating invite' };
+ }
+};
+
+/**
+ * @function getInvite
+ * @description This function retrieves a user invite
+ * @param {string} encodedToken - The token of the invite to retrieve
+ * @param {string} email - The email of the user to validate
+ * @returns {Promise} A promise that resolves to the retrieved invite document
+ * @throws {Error} If there is an error retrieving the invite, if the invite does not exist, or if the email does not match
+ */
+const getInvite = async (encodedToken, email) => {
+ try {
+ const token = decodeURIComponent(encodedToken);
+ const hash = await hashToken(token);
+ const invite = await findToken({ token: hash, email });
+
+ if (!invite) {
+ throw new Error('Invite not found or email does not match');
+ }
+
+ return invite;
+ } catch (error) {
+ logger.error('[getInvite] Error getting invite:', error);
+ return { error: true, message: error.message };
+ }
+};
+
+module.exports = {
+ createInvite,
+ getInvite,
+};
diff --git a/api/models/loadAddedAgent.js b/api/models/loadAddedAgent.js
new file mode 100644
index 0000000000..aa83375eae
--- /dev/null
+++ b/api/models/loadAddedAgent.js
@@ -0,0 +1,218 @@
+const { logger } = require('@librechat/data-schemas');
+const { getCustomEndpointConfig } = require('@librechat/api');
+const {
+ Tools,
+ Constants,
+ isAgentsEndpoint,
+ isEphemeralAgentId,
+ appendAgentIdSuffix,
+ encodeEphemeralAgentId,
+} = require('librechat-data-provider');
+const { getMCPServerTools } = require('~/server/services/Config');
+
+const { mcp_all, mcp_delimiter } = Constants;
+
+/**
+ * Constant for added conversation agent ID.
+ */
+const ADDED_AGENT_ID = 'added_agent';
+
+/**
+ * Get an agent document based on the provided ID.
+ * Injected at runtime via setGetAgent (see below); undefined until then.
+ * @param {Object} searchParameter - The search parameters to find the agent.
+ * @param {string} searchParameter.id - The ID of the agent.
+ * @returns {Promise<Object|null>} The agent document, or null when not found.
+ */
+let getAgent;
+
+/**
+ * Set the getAgent function (dependency injection to avoid circular imports).
+ * Must be called before loadAddedAgent resolves a persisted agent_id.
+ * @param {Function} fn - The getAgent implementation to use.
+ */
+const setGetAgent = (fn) => {
+  getAgent = fn;
+};
+
+/**
+ * Load an agent from an added conversation (TConversation).
+ * Used for multi-convo parallel agent execution.
+ *
+ * Resolution order:
+ *  1. A persisted agent referenced by a non-ephemeral `conversation.agent_id`
+ *     (requires setGetAgent to have been called first).
+ *  2. If both the primary and added agents are ephemeral, an ephemeral config
+ *     that duplicates the primary agent's tools.
+ *  3. Otherwise, a fresh ephemeral config assembled from the conversation's
+ *     endpoint/model settings, model spec, and ephemeralAgent options.
+ *
+ * @param {Object} params
+ * @param {import('express').Request} params.req
+ * @param {import('librechat-data-provider').TConversation} params.conversation - The added conversation
+ * @param {import('librechat-data-provider').Agent} [params.primaryAgent] - The primary agent (used to duplicate tools when both are ephemeral)
+ * @returns {Promise<Object|null>} The agent config as a plain object, or null if invalid.
+ */
+const loadAddedAgent = async ({ req, conversation, primaryAgent }) => {
+  if (!conversation) {
+    return null;
+  }
+
+  // If there's an agent_id, load the existing agent
+  if (conversation.agent_id && !isEphemeralAgentId(conversation.agent_id)) {
+    if (!getAgent) {
+      throw new Error('getAgent not initialized - call setGetAgent first');
+    }
+    const agent = await getAgent({
+      id: conversation.agent_id,
+    });
+
+    if (!agent) {
+      logger.warn(`[loadAddedAgent] Agent ${conversation.agent_id} not found`);
+      return null;
+    }
+
+    // Derive a numeric version from the versions array length (0 when absent).
+    agent.version = agent.versions ? agent.versions.length : 0;
+    // Append suffix to distinguish from primary agent (matches ephemeral format)
+    // This is needed when both agents have the same ID or for consistent parallel content attribution
+    agent.id = appendAgentIdSuffix(agent.id, 1);
+    return agent;
+  }
+
+  // Otherwise, create an ephemeral agent config from the conversation
+  const { model, endpoint, promptPrefix, spec, ...rest } = conversation;
+
+  if (!endpoint || !model) {
+    logger.warn('[loadAddedAgent] Missing required endpoint or model for ephemeral agent');
+    return null;
+  }
+
+  // If both primary and added agents are ephemeral, duplicate tools from primary agent
+  const primaryIsEphemeral = primaryAgent && isEphemeralAgentId(primaryAgent.id);
+  if (primaryIsEphemeral && Array.isArray(primaryAgent.tools)) {
+    // Get endpoint config and model spec for display name fallbacks
+    const appConfig = req.config;
+    let endpointConfig = appConfig?.endpoints?.[endpoint];
+    if (!isAgentsEndpoint(endpoint) && !endpointConfig) {
+      try {
+        endpointConfig = getCustomEndpointConfig({ endpoint, appConfig });
+      } catch (err) {
+        // Best-effort: a missing custom endpoint config only degrades the display label.
+        logger.error('[loadAddedAgent] Error getting custom endpoint config', err);
+      }
+    }
+
+    // Look up model spec for label fallback
+    const modelSpecs = appConfig?.modelSpecs?.list;
+    const modelSpec = spec != null && spec !== '' ? modelSpecs?.find((s) => s.name === spec) : null;
+
+    // For ephemeral agents, use modelLabel if provided, then model spec's label,
+    // then modelDisplayLabel from endpoint config, otherwise empty string to show model name
+    const sender = rest.modelLabel ?? modelSpec?.label ?? endpointConfig?.modelDisplayLabel ?? '';
+
+    // index: 1 distinguishes this added agent's ID from the primary agent's.
+    const ephemeralId = encodeEphemeralAgentId({ endpoint, model, sender, index: 1 });
+
+    return {
+      id: ephemeralId,
+      instructions: promptPrefix || '',
+      provider: endpoint,
+      model_parameters: {},
+      model,
+      tools: [...primaryAgent.tools], // shallow copy so the two agents don't share the array
+    };
+  }
+
+  // Extract ephemeral agent options from conversation if present
+  const ephemeralAgent = rest.ephemeralAgent;
+  const mcpServers = new Set(ephemeralAgent?.mcp);
+  const userId = req.user?.id;
+
+  // Check model spec for MCP servers
+  const modelSpecs = req.config?.modelSpecs?.list;
+  let modelSpec = null;
+  if (spec != null && spec !== '') {
+    modelSpec = modelSpecs?.find((s) => s.name === spec) || null;
+  }
+  if (modelSpec?.mcpServers) {
+    // Merge spec-declared MCP servers with any user-selected ones (Set dedupes).
+    for (const mcpServer of modelSpec.mcpServers) {
+      mcpServers.add(mcpServer);
+    }
+  }
+
+  /** @type {string[]} */
+  const tools = [];
+  // A capability is enabled if either the per-conversation ephemeral options
+  // or the model spec request it.
+  if (ephemeralAgent?.execute_code === true || modelSpec?.executeCode === true) {
+    tools.push(Tools.execute_code);
+  }
+  if (ephemeralAgent?.file_search === true || modelSpec?.fileSearch === true) {
+    tools.push(Tools.file_search);
+  }
+  if (ephemeralAgent?.web_search === true || modelSpec?.webSearch === true) {
+    tools.push(Tools.web_search);
+  }
+
+  const addedServers = new Set();
+  if (mcpServers.size > 0) {
+    for (const mcpServer of mcpServers) {
+      if (addedServers.has(mcpServer)) {
+        continue;
+      }
+      const serverTools = await getMCPServerTools(userId, mcpServer);
+      if (!serverTools) {
+        // No tool listing available: register a wildcard entry for the whole server.
+        tools.push(`${mcp_all}${mcp_delimiter}${mcpServer}`);
+        addedServers.add(mcpServer);
+        continue;
+      }
+      tools.push(...Object.keys(serverTools));
+      addedServers.add(mcpServer);
+    }
+  }
+
+  // Build model_parameters from conversation fields
+  // NOTE(review): both snake_case and camelCase variants are copied verbatim;
+  // presumably downstream provider code normalizes them — confirm.
+  const model_parameters = {};
+  const paramKeys = [
+    'temperature',
+    'top_p',
+    'topP',
+    'topK',
+    'presence_penalty',
+    'frequency_penalty',
+    'maxOutputTokens',
+    'maxTokens',
+    'max_tokens',
+  ];
+
+  for (const key of paramKeys) {
+    if (rest[key] != null) {
+      model_parameters[key] = rest[key];
+    }
+  }
+
+  // Get endpoint config for modelDisplayLabel fallback
+  const appConfig = req.config;
+  let endpointConfig = appConfig?.endpoints?.[endpoint];
+  if (!isAgentsEndpoint(endpoint) && !endpointConfig) {
+    try {
+      endpointConfig = getCustomEndpointConfig({ endpoint, appConfig });
+    } catch (err) {
+      // Best-effort: a missing custom endpoint config only degrades the display label.
+      logger.error('[loadAddedAgent] Error getting custom endpoint config', err);
+    }
+  }
+
+  // For ephemeral agents, use modelLabel if provided, then model spec's label,
+  // then modelDisplayLabel from endpoint config, otherwise empty string to show model name
+  const sender = rest.modelLabel ?? modelSpec?.label ?? endpointConfig?.modelDisplayLabel ?? '';
+
+  /** Encoded ephemeral agent ID with endpoint, model, sender, and index=1 to distinguish from primary */
+  const ephemeralId = encodeEphemeralAgentId({ endpoint, model, sender, index: 1 });
+
+  const result = {
+    id: ephemeralId,
+    instructions: promptPrefix || '',
+    provider: endpoint,
+    model_parameters,
+    model,
+    tools,
+  };
+
+  // NOTE(review): the truthiness check already implies != null, so the first
+  // clause is redundant (harmless); only truthy artifacts values are copied.
+  if (ephemeralAgent?.artifacts != null && ephemeralAgent.artifacts) {
+    result.artifacts = ephemeralAgent.artifacts;
+  }
+
+  return result;
+};
+
+module.exports = {
+  ADDED_AGENT_ID,
+  loadAddedAgent,
+  setGetAgent,
+};
diff --git a/api/models/spendTokens.js b/api/models/spendTokens.js
new file mode 100644
index 0000000000..afe05969d8
--- /dev/null
+++ b/api/models/spendTokens.js
@@ -0,0 +1,140 @@
+const { logger } = require('@librechat/data-schemas');
+const { createTransaction, createStructuredTransaction } = require('./Transaction');
+/**
+ * Creates up to two transactions to record the spending of tokens.
+ *
+ * @function
+ * @async
+ * @param {txData} txData - Transaction data.
+ * @param {Object} tokenUsage - The number of tokens used.
+ * @param {Number} tokenUsage.promptTokens - The number of prompt tokens used.
+ * @param {Number} tokenUsage.completionTokens - The number of completion tokens used.
+ * @returns {Promise<void>} - Returns nothing. Errors from transaction creation are
+ * caught and logged here, never rethrown, so a failed charge does not abort the caller.
+ */
+const spendTokens = async (txData, tokenUsage) => {
+  const { promptTokens, completionTokens } = tokenUsage;
+  logger.debug(
+    `[spendTokens] conversationId: ${txData.conversationId}${
+      txData?.context ? ` | Context: ${txData?.context}` : ''
+    } | Token usage: `,
+    {
+      promptTokens,
+      completionTokens,
+    },
+  );
+  let prompt, completion;
+  // Clamp undefined/negative prompt counts to 0; also reused as inputTokenCount below.
+  const normalizedPromptTokens = Math.max(promptTokens ?? 0, 0);
+  try {
+    if (promptTokens !== undefined) {
+      prompt = await createTransaction({
+        ...txData,
+        tokenType: 'prompt',
+        // Spending is recorded as a negative amount; keep 0 as literal 0 to avoid -0.
+        rawAmount: promptTokens === 0 ? 0 : -normalizedPromptTokens,
+        inputTokenCount: normalizedPromptTokens,
+      });
+    }
+
+    if (completionTokens !== undefined) {
+      completion = await createTransaction({
+        ...txData,
+        tokenType: 'completion',
+        rawAmount: completionTokens === 0 ? 0 : -Math.max(completionTokens, 0),
+        // Prompt size accompanies the completion tx — presumably for rate/threshold
+        // decisions in createTransaction; confirm there.
+        inputTokenCount: normalizedPromptTokens,
+      });
+    }
+
+    if (prompt || completion) {
+      logger.debug('[spendTokens] Transaction data record against balance:', {
+        user: txData.user,
+        prompt: prompt?.prompt,
+        promptRate: prompt?.rate,
+        completion: completion?.completion,
+        completionRate: completion?.rate,
+        balance: completion?.balance ?? prompt?.balance,
+      });
+    } else {
+      logger.debug('[spendTokens] No transactions incurred against balance');
+    }
+  } catch (err) {
+    // Deliberate best-effort: charging failures are logged, not propagated.
+    logger.error('[spendTokens]', err);
+  }
+};
+
+/**
+ * Creates transactions to record the spending of structured tokens
+ * (prompt usage broken down into input / cache-write / cache-read components).
+ *
+ * @function
+ * @async
+ * @param {txData} txData - Transaction data.
+ * @param {Object} tokenUsage - The number of tokens used.
+ * @param {Object} tokenUsage.promptTokens - The number of prompt tokens used.
+ * @param {Number} tokenUsage.promptTokens.input - The number of input tokens.
+ * @param {Number} tokenUsage.promptTokens.write - The number of write tokens.
+ * @param {Number} tokenUsage.promptTokens.read - The number of read tokens.
+ * @param {Number} tokenUsage.completionTokens - The number of completion tokens used.
+ * @returns {Promise<{prompt: Object|undefined, completion: Object|undefined}>} The
+ * created transaction results (either may be undefined). Errors from transaction
+ * creation are caught and logged here, never rethrown.
+ */
+const spendStructuredTokens = async (txData, tokenUsage) => {
+  const { promptTokens, completionTokens } = tokenUsage;
+  logger.debug(
+    `[spendStructuredTokens] conversationId: ${txData.conversationId}${
+      txData?.context ? ` | Context: ${txData?.context}` : ''
+    } | Token usage: `,
+    {
+      promptTokens,
+      completionTokens,
+    },
+  );
+  let prompt, completion;
+  try {
+    if (promptTokens) {
+      // Clamp each component to >= 0 before charging; amounts are negated for spending.
+      const input = Math.max(promptTokens.input ?? 0, 0);
+      const write = Math.max(promptTokens.write ?? 0, 0);
+      const read = Math.max(promptTokens.read ?? 0, 0);
+      const totalInputTokens = input + write + read;
+      prompt = await createStructuredTransaction({
+        ...txData,
+        tokenType: 'prompt',
+        inputTokens: -input,
+        writeTokens: -write,
+        readTokens: -read,
+        inputTokenCount: totalInputTokens,
+      });
+    }
+
+    if (completionTokens) {
+      // Recompute the clamped total here so the completion tx carries the prompt
+      // size even though the values above are scoped to the prompt branch.
+      const totalInputTokens = promptTokens
+        ? Math.max(promptTokens.input ?? 0, 0) +
+          Math.max(promptTokens.write ?? 0, 0) +
+          Math.max(promptTokens.read ?? 0, 0)
+        : undefined;
+      completion = await createTransaction({
+        ...txData,
+        tokenType: 'completion',
+        rawAmount: -Math.max(completionTokens, 0),
+        inputTokenCount: totalInputTokens,
+      });
+    }
+
+    if (prompt || completion) {
+      logger.debug('[spendStructuredTokens] Transaction data record against balance:', {
+        user: txData.user,
+        prompt: prompt?.prompt,
+        promptRate: prompt?.rate,
+        completion: completion?.completion,
+        completionRate: completion?.rate,
+        balance: completion?.balance ?? prompt?.balance,
+      });
+    } else {
+      logger.debug('[spendStructuredTokens] No transactions incurred against balance');
+    }
+  } catch (err) {
+    // Deliberate best-effort: charging failures are logged, not propagated.
+    logger.error('[spendStructuredTokens]', err);
+  }
+
+  return { prompt, completion };
+};
+
+module.exports = { spendTokens, spendStructuredTokens };
diff --git a/packages/data-schemas/src/methods/spendTokens.spec.ts b/api/models/spendTokens.spec.js
similarity index 76%
rename from packages/data-schemas/src/methods/spendTokens.spec.ts
rename to api/models/spendTokens.spec.js
index d505663d57..c076d29700 100644
--- a/packages/data-schemas/src/methods/spendTokens.spec.ts
+++ b/api/models/spendTokens.spec.js
@@ -1,60 +1,30 @@
-import mongoose from 'mongoose';
-import { MongoMemoryServer } from 'mongodb-memory-server';
-import { matchModelName, findMatchingPattern } from './test-helpers';
-import { createModels } from '~/models';
-import { createTxMethods, tokenValues, premiumTokenValues } from './tx';
-import { createTransactionMethods } from './transaction';
-import { createSpendTokensMethods } from './spendTokens';
-import type { ITransaction } from '~/schema/transaction';
-import type { IBalance } from '..';
+const mongoose = require('mongoose');
+const { MongoMemoryServer } = require('mongodb-memory-server');
+const { createTransaction, createAutoRefillTransaction } = require('./Transaction');
+const { tokenValues, premiumTokenValues, getCacheMultiplier } = require('./tx');
+const { spendTokens, spendStructuredTokens } = require('./spendTokens');
-jest.mock('~/config/winston', () => ({
- error: jest.fn(),
- warn: jest.fn(),
- info: jest.fn(),
- debug: jest.fn(),
+require('~/db/models');
+
+jest.mock('~/config', () => ({
+ logger: {
+ debug: jest.fn(),
+ error: jest.fn(),
+ },
}));
-let mongoServer: InstanceType;
-let spendTokens: ReturnType['spendTokens'];
-let spendStructuredTokens: ReturnType['spendStructuredTokens'];
-let createTransaction: ReturnType['createTransaction'];
-let createAutoRefillTransaction: ReturnType<
- typeof createTransactionMethods
->['createAutoRefillTransaction'];
-let getCacheMultiplier: ReturnType['getCacheMultiplier'];
-
describe('spendTokens', () => {
- let userId: mongoose.Types.ObjectId;
- let Transaction: mongoose.Model;
- let Balance: mongoose.Model;
+ let mongoServer;
+ let userId;
+ let Transaction;
+ let Balance;
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
await mongoose.connect(mongoServer.getUri());
- const models = createModels(mongoose);
- Object.assign(mongoose.models, models);
-
- Transaction = mongoose.models.Transaction;
- Balance = mongoose.models.Balance;
-
- const txMethods = createTxMethods(mongoose, { matchModelName, findMatchingPattern });
- getCacheMultiplier = txMethods.getCacheMultiplier;
-
- const transactionMethods = createTransactionMethods(mongoose, {
- getMultiplier: txMethods.getMultiplier,
- getCacheMultiplier: txMethods.getCacheMultiplier,
- });
- createTransaction = transactionMethods.createTransaction;
- createAutoRefillTransaction = transactionMethods.createAutoRefillTransaction;
-
- const spendMethods = createSpendTokensMethods(mongoose, {
- createTransaction: transactionMethods.createTransaction,
- createStructuredTransaction: transactionMethods.createStructuredTransaction,
- });
- spendTokens = spendMethods.spendTokens;
- spendStructuredTokens = spendMethods.spendStructuredTokens;
+ Transaction = mongoose.model('Transaction');
+ Balance = mongoose.model('Balance');
});
afterAll(async () => {
@@ -109,7 +79,7 @@ describe('spendTokens', () => {
// Verify balance was updated
const balance = await Balance.findOne({ user: userId });
expect(balance).toBeDefined();
- expect(balance!.tokenCredits).toBeLessThan(10000); // Balance should be reduced
+ expect(balance.tokenCredits).toBeLessThan(10000); // Balance should be reduced
});
it('should handle zero completion tokens', async () => {
@@ -141,7 +111,7 @@ describe('spendTokens', () => {
expect(transactions[0].tokenType).toBe('completion');
// In JavaScript -0 and 0 are different but functionally equivalent
// Use Math.abs to handle both 0 and -0
- expect(Math.abs(transactions[0].rawAmount!)).toBe(0);
+ expect(Math.abs(transactions[0].rawAmount)).toBe(0);
// Check prompt transaction
expect(transactions[1].tokenType).toBe('prompt');
@@ -193,7 +163,7 @@ describe('spendTokens', () => {
// Verify balance was not updated (should still be 10000)
const balance = await Balance.findOne({ user: userId });
- expect(balance!.tokenCredits).toBe(10000);
+ expect(balance.tokenCredits).toBe(10000);
});
it('should not allow balance to go below zero when spending tokens', async () => {
@@ -226,7 +196,7 @@ describe('spendTokens', () => {
// Verify balance was reduced to exactly 0, not negative
const balance = await Balance.findOne({ user: userId });
expect(balance).toBeDefined();
- expect(balance!.tokenCredits).toBe(0);
+ expect(balance.tokenCredits).toBe(0);
// Check that the transaction records show the adjusted values
const transactionResults = await Promise.all(
@@ -274,7 +244,7 @@ describe('spendTokens', () => {
// Check balance after first transaction
let balance = await Balance.findOne({ user: userId });
- expect(balance!.tokenCredits).toBe(0);
+ expect(balance.tokenCredits).toBe(0);
// Second transaction - should keep balance at 0, not make it negative or increase it
const txData2 = {
@@ -294,7 +264,7 @@ describe('spendTokens', () => {
// Check balance after second transaction - should still be 0
balance = await Balance.findOne({ user: userId });
- expect(balance!.tokenCredits).toBe(0);
+ expect(balance.tokenCredits).toBe(0);
// Verify all transactions were created
const transactions = await Transaction.find({ user: userId });
@@ -305,7 +275,7 @@ describe('spendTokens', () => {
// Log the transaction details for debugging
console.log('Transaction details:');
- transactionDetails.forEach((tx, i: number) => {
+ transactionDetails.forEach((tx, i) => {
console.log(`Transaction ${i + 1}:`, {
tokenType: tx.tokenType,
rawAmount: tx.rawAmount,
@@ -329,7 +299,7 @@ describe('spendTokens', () => {
console.log('Direct Transaction.create result:', directResult);
// The completion value should never be positive
- expect(directResult!.completion).not.toBeGreaterThan(0);
+ expect(directResult.completion).not.toBeGreaterThan(0);
});
it('should ensure tokenValue is always negative for spending tokens', async () => {
@@ -401,7 +371,7 @@ describe('spendTokens', () => {
// Check balance after first transaction
let balance = await Balance.findOne({ user: userId });
- expect(balance!.tokenCredits).toBe(0);
+ expect(balance.tokenCredits).toBe(0);
// Second transaction - should keep balance at 0, not make it negative or increase it
const txData2 = {
@@ -425,7 +395,7 @@ describe('spendTokens', () => {
// Check balance after second transaction - should still be 0
balance = await Balance.findOne({ user: userId });
- expect(balance!.tokenCredits).toBe(0);
+ expect(balance.tokenCredits).toBe(0);
// Verify all transactions were created
const transactions = await Transaction.find({ user: userId });
@@ -436,7 +406,7 @@ describe('spendTokens', () => {
// Log the transaction details for debugging
console.log('Structured transaction details:');
- transactionDetails.forEach((tx, i: number) => {
+ transactionDetails.forEach((tx, i) => {
console.log(`Transaction ${i + 1}:`, {
tokenType: tx.tokenType,
rawAmount: tx.rawAmount,
@@ -483,7 +453,7 @@ describe('spendTokens', () => {
// Verify balance was reduced to exactly 0, not negative
const balance = await Balance.findOne({ user: userId });
expect(balance).toBeDefined();
- expect(balance!.tokenCredits).toBe(0);
+ expect(balance.tokenCredits).toBe(0);
// The result should show the adjusted values
expect(result).toEqual({
@@ -524,7 +494,7 @@ describe('spendTokens', () => {
}));
// Process all transactions concurrently to simulate race conditions
- const promises: Promise[] = [];
+ const promises = [];
let expectedTotalSpend = 0;
for (let i = 0; i < collectedUsage.length; i++) {
@@ -597,10 +567,10 @@ describe('spendTokens', () => {
console.log('Initial balance:', initialBalance);
console.log('Expected total spend:', expectedTotalSpend);
console.log('Expected final balance:', expectedFinalBalance);
- console.log('Actual final balance:', finalBalance!.tokenCredits);
+ console.log('Actual final balance:', finalBalance.tokenCredits);
// Allow for small rounding differences
- expect(finalBalance!.tokenCredits).toBeCloseTo(expectedFinalBalance, 0);
+ expect(finalBalance.tokenCredits).toBeCloseTo(expectedFinalBalance, 0);
// Verify all transactions were created
const transactions = await Transaction.find({
@@ -617,19 +587,19 @@ describe('spendTokens', () => {
let totalTokenValue = 0;
transactions.forEach((tx) => {
console.log(`${tx.tokenType}: rawAmount=${tx.rawAmount}, tokenValue=${tx.tokenValue}`);
- totalTokenValue += tx.tokenValue!;
+ totalTokenValue += tx.tokenValue;
});
console.log('Total token value from transactions:', totalTokenValue);
// The difference between expected and actual is significant
// This is likely due to the multipliers being different in the test environment
// Let's adjust our expectation based on the actual transactions
- const actualSpend = initialBalance - finalBalance!.tokenCredits;
+ const actualSpend = initialBalance - finalBalance.tokenCredits;
console.log('Actual spend:', actualSpend);
// Instead of checking the exact balance, let's verify that:
// 1. The balance was reduced (tokens were spent)
- expect(finalBalance!.tokenCredits).toBeLessThan(initialBalance);
+ expect(finalBalance.tokenCredits).toBeLessThan(initialBalance);
// 2. The total token value from transactions matches the actual spend
expect(Math.abs(totalTokenValue)).toBeCloseTo(actualSpend, -3); // Allow for larger differences
});
@@ -646,7 +616,7 @@ describe('spendTokens', () => {
const numberOfRefills = 25;
const refillAmount = 1000;
- const promises: Promise[] = [];
+ const promises = [];
for (let i = 0; i < numberOfRefills; i++) {
promises.push(
createAutoRefillTransaction({
@@ -672,10 +642,10 @@ describe('spendTokens', () => {
console.log('Initial balance (Increase Test):', initialBalance);
console.log(`Performed ${numberOfRefills} refills of ${refillAmount} each.`);
console.log('Expected final balance (Increase Test):', expectedFinalBalance);
- console.log('Actual final balance (Increase Test):', finalBalance!.tokenCredits);
+ console.log('Actual final balance (Increase Test):', finalBalance.tokenCredits);
// Use toBeCloseTo for safety, though toBe should work for integer math
- expect(finalBalance!.tokenCredits).toBeCloseTo(expectedFinalBalance, 0);
+ expect(finalBalance.tokenCredits).toBeCloseTo(expectedFinalBalance, 0);
// Verify all transactions were created
const transactions = await Transaction.find({
@@ -687,13 +657,12 @@ describe('spendTokens', () => {
expect(transactions.length).toBe(numberOfRefills);
// Optional: Verify the sum of increments from the results matches the balance change
- const totalIncrementReported = results.reduce((sum: number, result) => {
+ const totalIncrementReported = results.reduce((sum, result) => {
// Assuming createAutoRefillTransaction returns an object with the increment amount
// Adjust this based on the actual return structure.
// Let's assume it returns { balance: newBalance, transaction: { rawAmount: ... } }
// Or perhaps we check the transaction.rawAmount directly
- const r = result as Record>;
- return sum + ((r?.transaction?.rawAmount as number) || 0);
+ return sum + (result?.transaction?.rawAmount || 0);
}, 0);
console.log('Total increment reported by results:', totalIncrementReported);
expect(totalIncrementReported).toBe(expectedFinalBalance - initialBalance);
@@ -704,7 +673,7 @@ describe('spendTokens', () => {
// For refills, rawAmount is positive, and tokenValue might be calculated based on it
// Let's assume tokenValue directly reflects the increment for simplicity here
// If calculation is involved, adjust accordingly
- totalTokenValueFromDb += tx.rawAmount!; // Or tx.tokenValue if that holds the increment
+ totalTokenValueFromDb += tx.rawAmount; // Or tx.tokenValue if that holds the increment
});
console.log('Total rawAmount from DB transactions:', totalTokenValueFromDb);
expect(totalTokenValueFromDb).toBeCloseTo(expectedFinalBalance - initialBalance, 0);
@@ -764,7 +733,7 @@ describe('spendTokens', () => {
// Verify balance was updated
const balance = await Balance.findOne({ user: userId });
expect(balance).toBeDefined();
- expect(balance!.tokenCredits).toBeLessThan(10000); // Balance should be reduced
+ expect(balance.tokenCredits).toBeLessThan(10000); // Balance should be reduced
});
describe('premium token pricing', () => {
@@ -793,7 +762,7 @@ describe('spendTokens', () => {
promptTokens * tokenValues[model].prompt + completionTokens * tokenValues[model].completion;
const balance = await Balance.findOne({ user: userId });
- expect(balance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
+ expect(balance.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
});
it('should charge premium rates for claude-opus-4-6 when prompt tokens exceed threshold', async () => {
@@ -822,7 +791,7 @@ describe('spendTokens', () => {
completionTokens * premiumTokenValues[model].completion;
const balance = await Balance.findOne({ user: userId });
- expect(balance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
+ expect(balance.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
});
it('should charge premium rates for both prompt and completion in structured tokens when above threshold', async () => {
@@ -859,13 +828,12 @@ describe('spendTokens', () => {
const expectedPromptCost =
tokenUsage.promptTokens.input * premiumPromptRate +
- tokenUsage.promptTokens.write * (writeRate ?? 0) +
- tokenUsage.promptTokens.read * (readRate ?? 0);
+ tokenUsage.promptTokens.write * writeRate +
+ tokenUsage.promptTokens.read * readRate;
const expectedCompletionCost = tokenUsage.completionTokens * premiumCompletionRate;
- expect(result).not.toBeNull();
- expect(result!.prompt!.prompt).toBeCloseTo(-expectedPromptCost, 0);
- expect(result!.completion!.completion).toBeCloseTo(-expectedCompletionCost, 0);
+ expect(result.prompt.prompt).toBeCloseTo(-expectedPromptCost, 0);
+ expect(result.completion.completion).toBeCloseTo(-expectedCompletionCost, 0);
});
it('should charge standard rates for structured tokens when below threshold', async () => {
@@ -902,143 +870,12 @@ describe('spendTokens', () => {
const expectedPromptCost =
tokenUsage.promptTokens.input * standardPromptRate +
- tokenUsage.promptTokens.write * (writeRate ?? 0) +
- tokenUsage.promptTokens.read * (readRate ?? 0);
+ tokenUsage.promptTokens.write * writeRate +
+ tokenUsage.promptTokens.read * readRate;
const expectedCompletionCost = tokenUsage.completionTokens * standardCompletionRate;
- expect(result).not.toBeNull();
- expect(result!.prompt!.prompt).toBeCloseTo(-expectedPromptCost, 0);
- expect(result!.completion!.completion).toBeCloseTo(-expectedCompletionCost, 0);
- });
-
- it('should charge standard rates for gemini-3.1-pro-preview when prompt tokens are below threshold', async () => {
- const initialBalance = 100000000;
- await Balance.create({
- user: userId,
- tokenCredits: initialBalance,
- });
-
- const model = 'gemini-3.1-pro-preview';
- const promptTokens = 100000;
- const completionTokens = 500;
-
- const txData = {
- user: userId,
- conversationId: 'test-gemini31-standard-pricing',
- model,
- context: 'test',
- balance: { enabled: true },
- };
-
- await spendTokens(txData, { promptTokens, completionTokens });
-
- const expectedCost =
- promptTokens * tokenValues['gemini-3.1'].prompt +
- completionTokens * tokenValues['gemini-3.1'].completion;
-
- const balance = await Balance.findOne({ user: userId });
- expect(balance!.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
- });
-
- it('should charge premium rates for gemini-3.1-pro-preview when prompt tokens exceed threshold', async () => {
- const initialBalance = 100000000;
- await Balance.create({
- user: userId,
- tokenCredits: initialBalance,
- });
-
- const model = 'gemini-3.1-pro-preview';
- const promptTokens = 250000;
- const completionTokens = 500;
-
- const txData = {
- user: userId,
- conversationId: 'test-gemini31-premium-pricing',
- model,
- context: 'test',
- balance: { enabled: true },
- };
-
- await spendTokens(txData, { promptTokens, completionTokens });
-
- const expectedCost =
- promptTokens * premiumTokenValues['gemini-3.1'].prompt +
- completionTokens * premiumTokenValues['gemini-3.1'].completion;
-
- const balance = await Balance.findOne({ user: userId });
- expect(balance!.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
- });
-
- it('should charge premium rates for gemini-3.1-pro-preview-customtools when prompt tokens exceed threshold', async () => {
- const initialBalance = 100000000;
- await Balance.create({
- user: userId,
- tokenCredits: initialBalance,
- });
-
- const model = 'gemini-3.1-pro-preview-customtools';
- const promptTokens = 250000;
- const completionTokens = 500;
-
- const txData = {
- user: userId,
- conversationId: 'test-gemini31-customtools-premium',
- model,
- context: 'test',
- balance: { enabled: true },
- };
-
- await spendTokens(txData, { promptTokens, completionTokens });
-
- const expectedCost =
- promptTokens * premiumTokenValues['gemini-3.1'].prompt +
- completionTokens * premiumTokenValues['gemini-3.1'].completion;
-
- const balance = await Balance.findOne({ user: userId });
- expect(balance!.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
- });
-
- it('should charge premium rates for structured gemini-3.1 tokens when total input exceeds threshold', async () => {
- const initialBalance = 100000000;
- await Balance.create({
- user: userId,
- tokenCredits: initialBalance,
- });
-
- const model = 'gemini-3.1-pro-preview';
- const txData = {
- user: userId,
- conversationId: 'test-gemini31-structured-premium',
- model,
- context: 'test',
- balance: { enabled: true },
- };
-
- const tokenUsage = {
- promptTokens: {
- input: 200000,
- write: 10000,
- read: 5000,
- },
- completionTokens: 1000,
- };
-
- const result = await spendStructuredTokens(txData, tokenUsage);
-
- const premiumPromptRate = premiumTokenValues['gemini-3.1'].prompt;
- const premiumCompletionRate = premiumTokenValues['gemini-3.1'].completion;
- const writeRate = getCacheMultiplier({ model, cacheType: 'write' });
- const readRate = getCacheMultiplier({ model, cacheType: 'read' });
-
- const expectedPromptCost =
- tokenUsage.promptTokens.input * premiumPromptRate +
- tokenUsage.promptTokens.write * writeRate! +
- tokenUsage.promptTokens.read * readRate!;
- const expectedCompletionCost = tokenUsage.completionTokens * premiumCompletionRate;
-
- expect(result).not.toBeNull();
- expect(result!.prompt!.prompt).toBeCloseTo(-expectedPromptCost, 0);
- expect(result!.completion!.completion).toBeCloseTo(-expectedCompletionCost, 0);
+ expect(result.prompt.prompt).toBeCloseTo(-expectedPromptCost, 0);
+ expect(result.completion.completion).toBeCloseTo(-expectedCompletionCost, 0);
});
it('should not apply premium pricing to non-premium models regardless of prompt size', async () => {
@@ -1066,7 +903,7 @@ describe('spendTokens', () => {
promptTokens * tokenValues[model].prompt + completionTokens * tokenValues[model].completion;
const balance = await Balance.findOne({ user: userId });
- expect(balance?.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
+ expect(balance.tokenCredits).toBeCloseTo(initialBalance - expectedCost, 0);
});
});
@@ -1092,11 +929,11 @@ describe('spendTokens', () => {
const completionTx = transactions.find((t) => t.tokenType === 'completion');
const promptTx = transactions.find((t) => t.tokenType === 'prompt');
- expect(Math.abs(promptTx?.rawAmount ?? 0)).toBe(0);
- expect(completionTx?.rawAmount).toBe(-100);
+ expect(Math.abs(promptTx.rawAmount)).toBe(0);
+ expect(completionTx.rawAmount).toBe(-100);
const standardCompletionRate = tokenValues['claude-opus-4-6'].completion;
- expect(completionTx?.rate).toBe(standardCompletionRate);
+ expect(completionTx.rate).toBe(standardCompletionRate);
});
it('should use normalized inputTokenCount for premium threshold check on completion', async () => {
@@ -1126,8 +963,8 @@ describe('spendTokens', () => {
const premiumPromptRate = premiumTokenValues[model].prompt;
const premiumCompletionRate = premiumTokenValues[model].completion;
- expect(promptTx?.rate).toBe(premiumPromptRate);
- expect(completionTx?.rate).toBe(premiumCompletionRate);
+ expect(promptTx.rate).toBe(premiumPromptRate);
+ expect(completionTx.rate).toBe(premiumCompletionRate);
});
it('should keep inputTokenCount as zero when promptTokens is zero', async () => {
@@ -1150,10 +987,10 @@ describe('spendTokens', () => {
const completionTx = transactions.find((t) => t.tokenType === 'completion');
const promptTx = transactions.find((t) => t.tokenType === 'prompt');
- expect(Math.abs(promptTx?.rawAmount ?? 0)).toBe(0);
+ expect(Math.abs(promptTx.rawAmount)).toBe(0);
const standardCompletionRate = tokenValues['claude-opus-4-6'].completion;
- expect(completionTx?.rate).toBe(standardCompletionRate);
+ expect(completionTx.rate).toBe(standardCompletionRate);
});
it('should not trigger premium pricing with negative promptTokens on premium model', async () => {
@@ -1178,7 +1015,7 @@ describe('spendTokens', () => {
const completionTx = transactions.find((t) => t.tokenType === 'completion');
const standardCompletionRate = tokenValues[model].completion;
- expect(completionTx?.rate).toBe(standardCompletionRate);
+ expect(completionTx.rate).toBe(standardCompletionRate);
});
it('should normalize negative structured token values to zero in spendStructuredTokens', async () => {
@@ -1212,14 +1049,14 @@ describe('spendTokens', () => {
const completionTx = transactions.find((t) => t.tokenType === 'completion');
const promptTx = transactions.find((t) => t.tokenType === 'prompt');
- expect(Math.abs(promptTx?.inputTokens ?? 0)).toBe(0);
- expect(promptTx?.writeTokens).toBe(-50);
- expect(Math.abs(promptTx?.readTokens ?? 0)).toBe(0);
+ expect(Math.abs(promptTx.inputTokens)).toBe(0);
+ expect(promptTx.writeTokens).toBe(-50);
+ expect(Math.abs(promptTx.readTokens)).toBe(0);
- expect(Math.abs(completionTx?.rawAmount ?? 0)).toBe(0);
+ expect(Math.abs(completionTx.rawAmount)).toBe(0);
const standardRate = tokenValues[model].completion;
- expect(completionTx?.rate).toBe(standardRate);
+ expect(completionTx.rate).toBe(standardRate);
});
});
});
diff --git a/packages/data-schemas/src/methods/tx.ts b/api/models/tx.js
similarity index 59%
rename from packages/data-schemas/src/methods/tx.ts
rename to api/models/tx.js
index a048874457..9a6305ec5c 100644
--- a/packages/data-schemas/src/methods/tx.ts
+++ b/api/models/tx.js
@@ -1,46 +1,57 @@
+const { matchModelName, findMatchingPattern } = require('@librechat/api');
+const defaultRate = 6;
+
/**
* Token Pricing Configuration
*
- * Pattern Matching
- * ================
- * `findMatchingPattern` uses `modelName.includes(key)` and selects the **longest**
- * matching key. If a key's length equals the model name's length (exact match), it
- * returns immediately — no further keys are checked.
+ * IMPORTANT: Key Ordering for Pattern Matching
+ * ============================================
+ * The `findMatchingPattern` function iterates through object keys in REVERSE order
+ * (last-defined keys are checked first) and uses `modelName.includes(key)` for matching.
*
- * For keys of different lengths, definition order does not affect the result — the
- * longest match always wins. For **same-length ties**, the function iterates in
- * reverse, so the last-defined key wins. Key ordering therefore matters for:
- * 1. **Performance**: list older/legacy models first, newer models last — newer
- * models are more commonly used and will match earlier in the reverse scan.
- * 2. **Same-length tie-breaking**: when two keys of equal length both match,
- * the last-defined key wins.
+ * This means:
+ * 1. BASE PATTERNS must be defined FIRST (e.g., "kimi", "moonshot")
+ * 2. SPECIFIC PATTERNS must be defined AFTER their base patterns (e.g., "kimi-k2", "kimi-k2.5")
+ *
+ * Example ordering for Kimi models:
+ * kimi: { prompt: 0.6, completion: 2.5 }, // Base pattern - checked last
+ * 'kimi-k2': { prompt: 0.6, completion: 2.5 }, // More specific - checked before "kimi"
+ * 'kimi-k2.5': { prompt: 0.6, completion: 3.0 }, // Most specific - checked first
+ *
+ * Why this matters:
+ * - Model name "kimi-k2.5" contains both "kimi" and "kimi-k2" as substrings
+ * - If "kimi" were checked first, it would incorrectly match and return wrong pricing
+ * - By defining specific patterns AFTER base patterns, they're checked first in reverse iteration
+ *
+ * This applies to BOTH `tokenValues` and `cacheTokenValues` objects.
+ *
+ * When adding new model families:
+ * 1. Define the base/generic pattern first
+ * 2. Define increasingly specific patterns after
+ * 3. Ensure no pattern is a substring of another that should match differently
*/
-export interface TxDeps {
- /** From @librechat/api — matches a model name to a canonical key. */
- matchModelName: (model: string, endpoint?: string) => string | undefined;
- /** From @librechat/api — finds the longest key in `values` whose key is a substring of `model`. */
- findMatchingPattern: (
- model: string,
- values: Record<string, Record<string, number>>,
- ) => string | undefined;
-}
-
-export const defaultRate = 6;
-
-/** AWS Bedrock pricing (source: https://aws.amazon.com/bedrock/pricing/) */
-const bedrockValues: Record<string, { prompt: number; completion: number }> = {
+/**
+ * AWS Bedrock pricing
+ * source: https://aws.amazon.com/bedrock/pricing/
+ */
+const bedrockValues = {
+ // Basic llama2 patterns (base defaults to smallest variant)
llama2: { prompt: 0.75, completion: 1.0 },
'llama-2': { prompt: 0.75, completion: 1.0 },
'llama2-13b': { prompt: 0.75, completion: 1.0 },
'llama2:70b': { prompt: 1.95, completion: 2.56 },
'llama2-70b': { prompt: 1.95, completion: 2.56 },
+
+ // Basic llama3 patterns (base defaults to smallest variant)
llama3: { prompt: 0.3, completion: 0.6 },
'llama-3': { prompt: 0.3, completion: 0.6 },
'llama3-8b': { prompt: 0.3, completion: 0.6 },
'llama3:8b': { prompt: 0.3, completion: 0.6 },
'llama3-70b': { prompt: 2.65, completion: 3.5 },
'llama3:70b': { prompt: 2.65, completion: 3.5 },
+
+ // llama3-x-Nb pattern (base defaults to smallest variant)
'llama3-1': { prompt: 0.22, completion: 0.22 },
'llama3-1-8b': { prompt: 0.22, completion: 0.22 },
'llama3-1-70b': { prompt: 0.72, completion: 0.72 },
@@ -52,6 +63,8 @@ const bedrockValues: Record = {
'llama3-2-90b': { prompt: 0.72, completion: 0.72 },
'llama3-3': { prompt: 2.65, completion: 3.5 },
'llama3-3-70b': { prompt: 2.65, completion: 3.5 },
+
+ // llama3.x:Nb pattern (base defaults to smallest variant)
'llama3.1': { prompt: 0.22, completion: 0.22 },
'llama3.1:8b': { prompt: 0.22, completion: 0.22 },
'llama3.1:70b': { prompt: 0.72, completion: 0.72 },
@@ -63,6 +76,8 @@ const bedrockValues: Record = {
'llama3.2:90b': { prompt: 0.72, completion: 0.72 },
'llama3.3': { prompt: 2.65, completion: 3.5 },
'llama3.3:70b': { prompt: 2.65, completion: 3.5 },
+
+ // llama-3.x-Nb pattern (base defaults to smallest variant)
'llama-3.1': { prompt: 0.22, completion: 0.22 },
'llama-3.1-8b': { prompt: 0.22, completion: 0.22 },
'llama-3.1-70b': { prompt: 0.72, completion: 0.72 },
@@ -81,17 +96,21 @@ const bedrockValues: Record = {
'mistral-large-2407': { prompt: 3.0, completion: 9.0 },
'command-text': { prompt: 1.5, completion: 2.0 },
'command-light': { prompt: 0.3, completion: 0.6 },
+ // AI21 models
'j2-mid': { prompt: 12.5, completion: 12.5 },
'j2-ultra': { prompt: 18.8, completion: 18.8 },
'jamba-instruct': { prompt: 0.5, completion: 0.7 },
+ // Amazon Titan models
'titan-text-lite': { prompt: 0.15, completion: 0.2 },
'titan-text-express': { prompt: 0.2, completion: 0.6 },
'titan-text-premier': { prompt: 0.5, completion: 1.5 },
+ // Amazon Nova models
'nova-micro': { prompt: 0.035, completion: 0.14 },
'nova-lite': { prompt: 0.06, completion: 0.24 },
'nova-pro': { prompt: 0.8, completion: 3.2 },
'nova-premier': { prompt: 2.5, completion: 12.5 },
'deepseek.r1': { prompt: 1.35, completion: 5.4 },
+ // Moonshot/Kimi models on Bedrock
'moonshot.kimi': { prompt: 0.6, completion: 2.5 },
'moonshot.kimi-k2': { prompt: 0.6, completion: 2.5 },
'moonshot.kimi-k2.5': { prompt: 0.6, completion: 3.0 },
@@ -101,19 +120,23 @@ const bedrockValues: Record = {
/**
* Mapping of model token sizes to their respective multipliers for prompt and completion.
* The rates are 1 USD per 1M tokens.
+ * @type {Object.<string, {prompt: number, completion: number}>}
*/
-export const tokenValues: Record<string, { prompt: number; completion: number }> = Object.assign(
+const tokenValues = Object.assign(
{
+ // Legacy token size mappings (generic patterns - check LAST)
'8k': { prompt: 30, completion: 60 },
'32k': { prompt: 60, completion: 120 },
'4k': { prompt: 1.5, completion: 2 },
'16k': { prompt: 3, completion: 4 },
+ // Generic fallback patterns (check LAST)
'claude-': { prompt: 0.8, completion: 2.4 },
deepseek: { prompt: 0.28, completion: 0.42 },
command: { prompt: 0.38, completion: 0.38 },
- gemma: { prompt: 0.02, completion: 0.04 },
+ gemma: { prompt: 0.02, completion: 0.04 }, // Base pattern (using gemma-3n-e4b pricing)
gemini: { prompt: 0.5, completion: 1.5 },
'gpt-oss': { prompt: 0.05, completion: 0.2 },
+ // Specific model variants (check FIRST - more specific patterns at end)
'gpt-3.5-turbo-1106': { prompt: 1, completion: 2 },
'gpt-3.5-turbo-0125': { prompt: 0.5, completion: 1.5 },
'gpt-4-1106': { prompt: 10, completion: 30 },
@@ -127,14 +150,9 @@ export const tokenValues: Record
'gpt-5': { prompt: 1.25, completion: 10 },
'gpt-5.1': { prompt: 1.25, completion: 10 },
'gpt-5.2': { prompt: 1.75, completion: 14 },
- 'gpt-5.3': { prompt: 1.75, completion: 14 },
- 'gpt-5.4': { prompt: 2.5, completion: 15 },
- // TODO: gpt-5.4-pro pricing not yet officially published — verify before release
- 'gpt-5.4-pro': { prompt: 5, completion: 30 },
'gpt-5-nano': { prompt: 0.05, completion: 0.4 },
'gpt-5-mini': { prompt: 0.25, completion: 2 },
'gpt-5-pro': { prompt: 15, completion: 120 },
- 'gpt-5.2-pro': { prompt: 21, completion: 168 },
o1: { prompt: 15, completion: 60 },
'o1-mini': { prompt: 1.1, completion: 4.4 },
'o1-preview': { prompt: 15, completion: 60 },
@@ -166,26 +184,24 @@ export const tokenValues: Record
'deepseek-reasoner': { prompt: 0.28, completion: 0.42 },
'deepseek-r1': { prompt: 0.4, completion: 2.0 },
'deepseek-v3': { prompt: 0.2, completion: 0.8 },
- 'gemma-2': { prompt: 0.01, completion: 0.03 },
- 'gemma-3': { prompt: 0.02, completion: 0.04 },
+ 'gemma-2': { prompt: 0.01, completion: 0.03 }, // Base pattern (using gemma-2-9b pricing)
+ 'gemma-3': { prompt: 0.02, completion: 0.04 }, // Base pattern (using gemma-3n-e4b pricing)
'gemma-3-27b': { prompt: 0.09, completion: 0.16 },
'gemini-1.5': { prompt: 2.5, completion: 10 },
'gemini-1.5-flash': { prompt: 0.15, completion: 0.6 },
'gemini-1.5-flash-8b': { prompt: 0.075, completion: 0.3 },
- 'gemini-2.0': { prompt: 0.1, completion: 0.4 },
+ 'gemini-2.0': { prompt: 0.1, completion: 0.4 }, // Base pattern (using 2.0-flash pricing)
'gemini-2.0-flash': { prompt: 0.1, completion: 0.4 },
'gemini-2.0-flash-lite': { prompt: 0.075, completion: 0.3 },
- 'gemini-2.5': { prompt: 0.3, completion: 2.5 },
+ 'gemini-2.5': { prompt: 0.3, completion: 2.5 }, // Base pattern (using 2.5-flash pricing)
'gemini-2.5-flash': { prompt: 0.3, completion: 2.5 },
'gemini-2.5-flash-lite': { prompt: 0.1, completion: 0.4 },
'gemini-2.5-pro': { prompt: 1.25, completion: 10 },
'gemini-2.5-flash-image': { prompt: 0.15, completion: 30 },
'gemini-3': { prompt: 2, completion: 12 },
'gemini-3-pro-image': { prompt: 2, completion: 120 },
- 'gemini-3.1': { prompt: 2, completion: 12 },
- 'gemini-3.1-flash-lite': { prompt: 0.25, completion: 1.5 },
'gemini-pro-vision': { prompt: 0.5, completion: 1.5 },
- grok: { prompt: 2.0, completion: 10.0 },
+ grok: { prompt: 2.0, completion: 10.0 }, // Base pattern defaults to grok-2
'grok-beta': { prompt: 5.0, completion: 15.0 },
'grok-vision-beta': { prompt: 5.0, completion: 15.0 },
'grok-2': { prompt: 2.0, completion: 10.0 },
@@ -200,7 +216,7 @@ export const tokenValues: Record
'grok-3-mini-fast': { prompt: 0.6, completion: 4 },
'grok-4': { prompt: 3.0, completion: 15.0 },
'grok-4-fast': { prompt: 0.2, completion: 0.5 },
- 'grok-4-1-fast': { prompt: 0.2, completion: 0.5 },
+ 'grok-4-1-fast': { prompt: 0.2, completion: 0.5 }, // covers reasoning & non-reasoning variants
'grok-code-fast': { prompt: 0.2, completion: 1.5 },
codestral: { prompt: 0.3, completion: 0.9 },
'ministral-3b': { prompt: 0.04, completion: 0.04 },
@@ -210,9 +226,10 @@ export const tokenValues: Record
'pixtral-large': { prompt: 2.0, completion: 6.0 },
'mistral-large': { prompt: 2.0, completion: 6.0 },
'mixtral-8x22b': { prompt: 0.65, completion: 0.65 },
- kimi: { prompt: 0.6, completion: 2.5 },
- moonshot: { prompt: 2.0, completion: 5.0 },
- 'kimi-latest': { prompt: 0.2, completion: 2.0 },
+ // Moonshot/Kimi models (base patterns first, specific patterns last for correct matching)
+ kimi: { prompt: 0.6, completion: 2.5 }, // Base pattern
+ moonshot: { prompt: 2.0, completion: 5.0 }, // Base pattern (using 128k pricing)
+ 'kimi-latest': { prompt: 0.2, completion: 2.0 }, // Uses 8k/32k/128k pricing dynamically
'kimi-k2': { prompt: 0.6, completion: 2.5 },
'kimi-k2.5': { prompt: 0.6, completion: 3.0 },
'kimi-k2-turbo': { prompt: 1.15, completion: 8.0 },
@@ -234,10 +251,12 @@ export const tokenValues: Record
'moonshot-v1-128k': { prompt: 2.0, completion: 5.0 },
'moonshot-v1-128k-vision': { prompt: 2.0, completion: 5.0 },
'moonshot-v1-128k-vision-preview': { prompt: 2.0, completion: 5.0 },
+ // GPT-OSS models (specific sizes)
'gpt-oss:20b': { prompt: 0.05, completion: 0.2 },
'gpt-oss-20b': { prompt: 0.05, completion: 0.2 },
'gpt-oss:120b': { prompt: 0.15, completion: 0.6 },
'gpt-oss-120b': { prompt: 0.15, completion: 0.6 },
+ // GLM models (Zhipu AI) - general to specific
glm4: { prompt: 0.1, completion: 0.1 },
'glm-4': { prompt: 0.1, completion: 0.1 },
'glm-4-32b': { prompt: 0.1, completion: 0.1 },
@@ -245,22 +264,26 @@ export const tokenValues: Record
'glm-4.5-air': { prompt: 0.14, completion: 0.86 },
'glm-4.5v': { prompt: 0.6, completion: 1.8 },
'glm-4.6': { prompt: 0.5, completion: 1.75 },
- qwen: { prompt: 0.08, completion: 0.33 },
- 'qwen2.5': { prompt: 0.08, completion: 0.33 },
+ // Qwen models
+ qwen: { prompt: 0.08, completion: 0.33 }, // Qwen base pattern (using qwen2.5-72b pricing)
+ 'qwen2.5': { prompt: 0.08, completion: 0.33 }, // Qwen 2.5 base pattern
'qwen-turbo': { prompt: 0.05, completion: 0.2 },
'qwen-plus': { prompt: 0.4, completion: 1.2 },
'qwen-max': { prompt: 1.6, completion: 6.4 },
'qwq-32b': { prompt: 0.15, completion: 0.4 },
- qwen3: { prompt: 0.035, completion: 0.138 },
+ // Qwen3 models
+ qwen3: { prompt: 0.035, completion: 0.138 }, // Qwen3 base pattern (using qwen3-4b pricing)
'qwen3-8b': { prompt: 0.035, completion: 0.138 },
'qwen3-14b': { prompt: 0.05, completion: 0.22 },
'qwen3-30b-a3b': { prompt: 0.06, completion: 0.22 },
'qwen3-32b': { prompt: 0.05, completion: 0.2 },
'qwen3-235b-a22b': { prompt: 0.08, completion: 0.55 },
+ // Qwen3 VL (Vision-Language) models
'qwen3-vl-8b-thinking': { prompt: 0.18, completion: 2.1 },
'qwen3-vl-8b-instruct': { prompt: 0.18, completion: 0.69 },
'qwen3-vl-30b-a3b': { prompt: 0.29, completion: 1.0 },
'qwen3-vl-235b-a22b': { prompt: 0.3, completion: 1.2 },
+ // Qwen3 specialized models
'qwen3-max': { prompt: 1.2, completion: 6 },
'qwen3-coder': { prompt: 0.22, completion: 0.95 },
'qwen3-coder-30b-a3b': { prompt: 0.06, completion: 0.25 },
@@ -273,9 +296,11 @@ export const tokenValues: Record
/**
* Mapping of model token sizes to their respective multipliers for cached input, read and write.
+ * See Anthropic's documentation on this: https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching#pricing
* The rates are 1 USD per 1M tokens.
+ * @type {Object.<string, {write: number, read: number}>}
*/
-export const cacheTokenValues: Record<string, { write: number; read: number }> = {
+const cacheTokenValues = {
'claude-3.7-sonnet': { write: 3.75, read: 0.3 },
'claude-3-7-sonnet': { write: 3.75, read: 0.3 },
'claude-3.5-sonnet': { write: 3.75, read: 0.3 },
@@ -289,27 +314,11 @@ export const cacheTokenValues: Record =
'claude-opus-4': { write: 18.75, read: 1.5 },
'claude-opus-4-5': { write: 6.25, read: 0.5 },
'claude-opus-4-6': { write: 6.25, read: 0.5 },
- 'gpt-4o': { write: 2.5, read: 1.25 },
- 'gpt-4o-mini': { write: 0.15, read: 0.075 },
- 'gpt-4.1': { write: 2, read: 0.5 },
- 'gpt-4.1-mini': { write: 0.4, read: 0.1 },
- 'gpt-4.1-nano': { write: 0.1, read: 0.025 },
- 'gpt-5': { write: 1.25, read: 0.125 },
- 'gpt-5.1': { write: 1.25, read: 0.125 },
- 'gpt-5.2': { write: 1.75, read: 0.175 },
- 'gpt-5.3': { write: 1.75, read: 0.175 },
- 'gpt-5.4': { write: 2.5, read: 0.25 },
- 'gpt-5-mini': { write: 0.25, read: 0.025 },
- 'gpt-5-nano': { write: 0.05, read: 0.005 },
- o1: { write: 15, read: 7.5 },
- 'o1-mini': { write: 1.1, read: 0.55 },
- 'o1-preview': { write: 15, read: 7.5 },
- o3: { write: 2, read: 0.5 },
- 'o3-mini': { write: 1.1, read: 0.275 },
- 'o4-mini': { write: 1.1, read: 0.275 },
+ // DeepSeek models - cache hit: $0.028/1M, cache miss: $0.28/1M
deepseek: { write: 0.28, read: 0.028 },
'deepseek-chat': { write: 0.28, read: 0.028 },
'deepseek-reasoner': { write: 0.28, read: 0.028 },
+ // Moonshot/Kimi models - cache hit: $0.15/1M (k2) or $0.10/1M (k2.5), cache miss: $0.60/1M
kimi: { write: 0.6, read: 0.15 },
'kimi-k2': { write: 0.6, read: 0.15 },
'kimi-k2.5': { write: 0.6, read: 0.1 },
@@ -321,177 +330,174 @@ export const cacheTokenValues: Record =
'kimi-k2-0711-preview': { write: 0.6, read: 0.15 },
'kimi-k2-thinking': { write: 0.6, read: 0.15 },
'kimi-k2-thinking-turbo': { write: 1.15, read: 0.15 },
- // Gemini 3.1 Pro - cache write: $2.00/1M, cache read: $0.20/1M
- 'gemini-3.1': { write: 2, read: 0.2 },
- // Gemini 3.1 Flash-Lite - cache write: $0.25/1M, cache read: $0.025/1M
- 'gemini-3.1-flash-lite': { write: 0.25, read: 0.025 },
};
/**
* Premium (tiered) pricing for models whose rates change based on prompt size.
+ * Each entry specifies the token threshold and the rates that apply above it.
+ * @type {Object.<string, {threshold: number, prompt: number, completion: number}>}
*/
-export const premiumTokenValues: Record<
- string,
- { threshold: number; prompt: number; completion: number }
-> = {
+const premiumTokenValues = {
'claude-opus-4-6': { threshold: 200000, prompt: 10, completion: 37.5 },
'claude-sonnet-4-6': { threshold: 200000, prompt: 6, completion: 22.5 },
- 'gemini-3.1': { threshold: 200000, prompt: 4, completion: 18 },
};
-export function createTxMethods(_mongoose: typeof import('mongoose'), txDeps: TxDeps) {
- const { matchModelName, findMatchingPattern } = txDeps;
-
- /**
- * Retrieves the key associated with a given model name.
- */
- function getValueKey(model: string, endpoint?: string): string | undefined {
- if (!model || typeof model !== 'string') {
- return undefined;
- }
-
- if (!endpoint || (typeof endpoint === 'string' && !tokenValues[endpoint])) {
- const matchedKey = findMatchingPattern(model, tokenValues);
- if (matchedKey) {
- return matchedKey;
- }
- }
-
- const modelName = matchModelName(model, endpoint);
- if (!modelName) {
- return undefined;
- }
-
- if (modelName.includes('gpt-3.5-turbo-16k')) {
- return '16k';
- } else if (modelName.includes('gpt-3.5')) {
- return '4k';
- } else if (modelName.includes('gpt-4-vision')) {
- return 'gpt-4-1106';
- } else if (modelName.includes('gpt-4-0125')) {
- return 'gpt-4-1106';
- } else if (modelName.includes('gpt-4-turbo')) {
- return 'gpt-4-1106';
- } else if (modelName.includes('gpt-4-32k')) {
- return '32k';
- } else if (modelName.includes('gpt-4')) {
- return '8k';
- }
-
+/**
+ * Retrieves the key associated with a given model name.
+ *
+ * @param {string} model - The model name to match.
+ * @param {string} endpoint - The endpoint name to match.
+ * @returns {string|undefined} The key corresponding to the model name, or undefined if no match is found.
+ */
+const getValueKey = (model, endpoint) => {
+ if (!model || typeof model !== 'string') {
return undefined;
}
- /**
- * Checks if premium (tiered) pricing applies and returns the premium rate.
- */
- function getPremiumRate(
- valueKey: string,
- tokenType: string,
- inputTokenCount?: number | null,
- ): number | null {
- if (inputTokenCount == null) {
- return null;
+ // Use findMatchingPattern directly against tokenValues for efficient lookup
+ if (!endpoint || (typeof endpoint === 'string' && !tokenValues[endpoint])) {
+ const matchedKey = findMatchingPattern(model, tokenValues);
+ if (matchedKey) {
+ return matchedKey;
}
- const premiumEntry = premiumTokenValues[valueKey];
- if (!premiumEntry || inputTokenCount <= premiumEntry.threshold) {
- return null;
- }
- return premiumEntry[tokenType as 'prompt' | 'completion'] ?? null;
}
- /**
- * Retrieves the multiplier for a given value key and token type.
- */
- function getMultiplier({
- model,
- valueKey,
- endpoint,
- tokenType,
- inputTokenCount,
- endpointTokenConfig,
- }: {
- model?: string;
- valueKey?: string;
- endpoint?: string;
- tokenType?: 'prompt' | 'completion';
- inputTokenCount?: number;
- endpointTokenConfig?: Record<string, Record<string, number>>;
- }): number {
- if (endpointTokenConfig && model) {
- return endpointTokenConfig?.[model]?.[tokenType as string] ?? defaultRate;
- }
+ // Fallback: use matchModelName for edge cases and legacy handling
+ const modelName = matchModelName(model, endpoint);
+ if (!modelName) {
+ return undefined;
+ }
- if (valueKey && tokenType) {
- const premiumRate = getPremiumRate(valueKey, tokenType, inputTokenCount);
- if (premiumRate != null) {
- return premiumRate;
- }
- return tokenValues[valueKey]?.[tokenType] ?? defaultRate;
- }
+ // Legacy token size mappings and aliases for older models
+ if (modelName.includes('gpt-3.5-turbo-16k')) {
+ return '16k';
+ } else if (modelName.includes('gpt-3.5')) {
+ return '4k';
+ } else if (modelName.includes('gpt-4-vision')) {
+ return 'gpt-4-1106'; // Alias for gpt-4-vision
+ } else if (modelName.includes('gpt-4-0125')) {
+ return 'gpt-4-1106'; // Alias for gpt-4-0125
+ } else if (modelName.includes('gpt-4-turbo')) {
+ return 'gpt-4-1106'; // Alias for gpt-4-turbo
+ } else if (modelName.includes('gpt-4-32k')) {
+ return '32k';
+ } else if (modelName.includes('gpt-4')) {
+ return '8k';
+ }
- if (!tokenType || !model) {
- return 1;
- }
+ return undefined;
+};
- valueKey = getValueKey(model, endpoint);
- if (!valueKey) {
- return defaultRate;
- }
+/**
+ * Retrieves the multiplier for a given value key and token type. If no value key is provided,
+ * it attempts to derive it from the model name.
+ *
+ * @param {Object} params - The parameters for the function.
+ * @param {string} [params.valueKey] - The key corresponding to the model name.
+ * @param {'prompt' | 'completion'} [params.tokenType] - The type of token (e.g., 'prompt' or 'completion').
+ * @param {string} [params.model] - The model name to derive the value key from if not provided.
+ * @param {string} [params.endpoint] - The endpoint name to derive the value key from if not provided.
+ * @param {EndpointTokenConfig} [params.endpointTokenConfig] - The token configuration for the endpoint.
+ * @param {number} [params.inputTokenCount] - Total input token count for tiered pricing.
+ * @returns {number} The multiplier for the given parameters, or a default value if not found.
+ */
+const getMultiplier = ({
+ model,
+ valueKey,
+ endpoint,
+ tokenType,
+ inputTokenCount,
+ endpointTokenConfig,
+}) => {
+ if (endpointTokenConfig) {
+ return endpointTokenConfig?.[model]?.[tokenType] ?? defaultRate;
+ }
+ if (valueKey && tokenType) {
const premiumRate = getPremiumRate(valueKey, tokenType, inputTokenCount);
if (premiumRate != null) {
return premiumRate;
}
-
return tokenValues[valueKey]?.[tokenType] ?? defaultRate;
}
- /**
- * Retrieves the cache multiplier for a given value key and token type.
- */
- function getCacheMultiplier({
- valueKey,
- cacheType,
- model,
- endpoint,
- endpointTokenConfig,
- }: {
- valueKey?: string;
- cacheType?: 'write' | 'read';
- model?: string;
- endpoint?: string;
- endpointTokenConfig?: Record<string, Record<string, number>>;
- }): number | null {
- if (endpointTokenConfig && model) {
- return endpointTokenConfig?.[model]?.[cacheType as string] ?? null;
- }
+ if (!tokenType || !model) {
+ return 1;
+ }
- if (valueKey && cacheType) {
- return cacheTokenValues[valueKey]?.[cacheType] ?? null;
- }
+ valueKey = getValueKey(model, endpoint);
+ if (!valueKey) {
+ return defaultRate;
+ }
- if (!cacheType || !model) {
- return null;
- }
+ const premiumRate = getPremiumRate(valueKey, tokenType, inputTokenCount);
+ if (premiumRate != null) {
+ return premiumRate;
+ }
- valueKey = getValueKey(model, endpoint);
- if (!valueKey) {
- return null;
- }
+ return tokenValues[valueKey]?.[tokenType] ?? defaultRate;
+};
+/**
+ * Checks if premium (tiered) pricing applies and returns the premium rate.
+ * Each model defines its own threshold in `premiumTokenValues`.
+ * @param {string} valueKey
+ * @param {string} tokenType
+ * @param {number} [inputTokenCount]
+ * @returns {number|null}
+ */
+const getPremiumRate = (valueKey, tokenType, inputTokenCount) => {
+ if (inputTokenCount == null) {
+ return null;
+ }
+ const premiumEntry = premiumTokenValues[valueKey];
+ if (!premiumEntry || inputTokenCount <= premiumEntry.threshold) {
+ return null;
+ }
+ return premiumEntry[tokenType] ?? null;
+};
+
+/**
+ * Retrieves the cache multiplier for a given value key and token type. If no value key is provided,
+ * it attempts to derive it from the model name.
+ *
+ * @param {Object} params - The parameters for the function.
+ * @param {string} [params.valueKey] - The key corresponding to the model name.
+ * @param {'write' | 'read'} [params.cacheType] - The type of token (e.g., 'write' or 'read').
+ * @param {string} [params.model] - The model name to derive the value key from if not provided.
+ * @param {string} [params.endpoint] - The endpoint name to derive the value key from if not provided.
+ * @param {EndpointTokenConfig} [params.endpointTokenConfig] - The token configuration for the endpoint.
+ * @returns {number | null} The multiplier for the given parameters, or `null` if not found.
+ */
+const getCacheMultiplier = ({ valueKey, cacheType, model, endpoint, endpointTokenConfig }) => {
+ if (endpointTokenConfig) {
+ return endpointTokenConfig?.[model]?.[cacheType] ?? null;
+ }
+
+ if (valueKey && cacheType) {
return cacheTokenValues[valueKey]?.[cacheType] ?? null;
}
- return {
- tokenValues,
- premiumTokenValues,
- getValueKey,
- getMultiplier,
- getPremiumRate,
- getCacheMultiplier,
- defaultRate,
- cacheTokenValues,
- };
-}
+ if (!cacheType || !model) {
+ return null;
+ }
-export type TxMethods = ReturnType;
+ valueKey = getValueKey(model, endpoint);
+ if (!valueKey) {
+ return null;
+ }
+
+ // If no cache pricing is defined for this valueKey/cacheType, return null (caller treats null as "no cache billing")
+ return cacheTokenValues[valueKey]?.[cacheType] ?? null;
+};
+
+module.exports = {
+ tokenValues,
+ premiumTokenValues,
+ getValueKey,
+ getMultiplier,
+ getPremiumRate,
+ getCacheMultiplier,
+ defaultRate,
+ cacheTokenValues,
+};
diff --git a/packages/data-schemas/src/methods/tx.spec.ts b/api/models/tx.spec.js
similarity index 85%
rename from packages/data-schemas/src/methods/tx.spec.ts
rename to api/models/tx.spec.js
index d1e12e5a55..df1bec8619 100644
--- a/packages/data-schemas/src/methods/tx.spec.ts
+++ b/api/models/tx.spec.js
@@ -1,18 +1,16 @@
/** Note: No hard-coded values should be used in this file. */
-import { matchModelName, findMatchingPattern } from './test-helpers';
-import { EModelEndpoint } from 'librechat-data-provider';
-import {
- createTxMethods,
- tokenValues,
- cacheTokenValues,
- premiumTokenValues,
+const { maxTokensMap } = require('@librechat/api');
+const { EModelEndpoint } = require('librechat-data-provider');
+const {
defaultRate,
-} from './tx';
-
-const { getValueKey, getMultiplier, getPremiumRate, getCacheMultiplier } = createTxMethods(
- {} as typeof import('mongoose'),
- { matchModelName, findMatchingPattern },
-);
+ tokenValues,
+ getValueKey,
+ getMultiplier,
+ getPremiumRate,
+ cacheTokenValues,
+ getCacheMultiplier,
+ premiumTokenValues,
+} = require('./tx');
describe('getValueKey', () => {
it('should return "16k" for model name containing "gpt-3.5-turbo-16k"', () => {
@@ -54,24 +52,6 @@ describe('getValueKey', () => {
expect(getValueKey('openai/gpt-5.2')).toBe('gpt-5.2');
});
- it('should return "gpt-5.3" for model name containing "gpt-5.3"', () => {
- expect(getValueKey('gpt-5.3')).toBe('gpt-5.3');
- expect(getValueKey('gpt-5.3-chat-latest')).toBe('gpt-5.3');
- expect(getValueKey('gpt-5.3-codex')).toBe('gpt-5.3');
- expect(getValueKey('openai/gpt-5.3')).toBe('gpt-5.3');
- });
-
- it('should return "gpt-5.4" for model name containing "gpt-5.4"', () => {
- expect(getValueKey('gpt-5.4')).toBe('gpt-5.4');
- expect(getValueKey('gpt-5.4-thinking')).toBe('gpt-5.4');
- expect(getValueKey('openai/gpt-5.4')).toBe('gpt-5.4');
- });
-
- it('should return "gpt-5.4-pro" for model name containing "gpt-5.4-pro"', () => {
- expect(getValueKey('gpt-5.4-pro')).toBe('gpt-5.4-pro');
- expect(getValueKey('openai/gpt-5.4-pro')).toBe('gpt-5.4-pro');
- });
-
it('should return "gpt-3.5-turbo-1106" for model name containing "gpt-3.5-turbo-1106"', () => {
expect(getValueKey('gpt-3.5-turbo-1106-some-other-info')).toBe('gpt-3.5-turbo-1106');
expect(getValueKey('openai/gpt-3.5-turbo-1106')).toBe('gpt-3.5-turbo-1106');
@@ -158,12 +138,6 @@ describe('getValueKey', () => {
expect(getValueKey('gpt-5-pro-preview')).toBe('gpt-5-pro');
});
- it('should return "gpt-5.2-pro" for model name containing "gpt-5.2-pro"', () => {
- expect(getValueKey('gpt-5.2-pro')).toBe('gpt-5.2-pro');
- expect(getValueKey('gpt-5.2-pro-2025-03-01')).toBe('gpt-5.2-pro');
- expect(getValueKey('openai/gpt-5.2-pro')).toBe('gpt-5.2-pro');
- });
-
it('should return "gpt-4o" for model type of "gpt-4o"', () => {
expect(getValueKey('gpt-4o-2024-08-06')).toBe('gpt-4o');
expect(getValueKey('gpt-4o-2024-08-06-0718')).toBe('gpt-4o');
@@ -265,7 +239,6 @@ describe('getMultiplier', () => {
});
it('should return defaultRate if tokenType is provided but not found in tokenValues', () => {
- // @ts-expect-error: intentionally passing invalid tokenType to test error handling
expect(getMultiplier({ valueKey: '8k', tokenType: 'unknownType' })).toBe(defaultRate);
});
@@ -363,18 +336,6 @@ describe('getMultiplier', () => {
);
});
- it('should return the correct multiplier for gpt-5.2-pro', () => {
- expect(getMultiplier({ model: 'gpt-5.2-pro', tokenType: 'prompt' })).toBe(
- tokenValues['gpt-5.2-pro'].prompt,
- );
- expect(getMultiplier({ model: 'gpt-5.2-pro', tokenType: 'completion' })).toBe(
- tokenValues['gpt-5.2-pro'].completion,
- );
- expect(getMultiplier({ model: 'openai/gpt-5.2-pro', tokenType: 'prompt' })).toBe(
- tokenValues['gpt-5.2-pro'].prompt,
- );
- });
-
it('should return the correct multiplier for gpt-5.1', () => {
expect(getMultiplier({ model: 'gpt-5.1', tokenType: 'prompt' })).toBe(
tokenValues['gpt-5.1'].prompt,
@@ -399,48 +360,6 @@ describe('getMultiplier', () => {
);
});
- it('should return the correct multiplier for gpt-5.3', () => {
- expect(getMultiplier({ model: 'gpt-5.3', tokenType: 'prompt' })).toBe(
- tokenValues['gpt-5.3'].prompt,
- );
- expect(getMultiplier({ model: 'gpt-5.3', tokenType: 'completion' })).toBe(
- tokenValues['gpt-5.3'].completion,
- );
- expect(getMultiplier({ model: 'gpt-5.3-codex', tokenType: 'prompt' })).toBe(
- tokenValues['gpt-5.3'].prompt,
- );
- expect(getMultiplier({ model: 'openai/gpt-5.3', tokenType: 'completion' })).toBe(
- tokenValues['gpt-5.3'].completion,
- );
- });
-
- it('should return the correct multiplier for gpt-5.4', () => {
- expect(getMultiplier({ model: 'gpt-5.4', tokenType: 'prompt' })).toBe(
- tokenValues['gpt-5.4'].prompt,
- );
- expect(getMultiplier({ model: 'gpt-5.4', tokenType: 'completion' })).toBe(
- tokenValues['gpt-5.4'].completion,
- );
- expect(getMultiplier({ model: 'gpt-5.4-thinking', tokenType: 'prompt' })).toBe(
- tokenValues['gpt-5.4'].prompt,
- );
- expect(getMultiplier({ model: 'openai/gpt-5.4', tokenType: 'completion' })).toBe(
- tokenValues['gpt-5.4'].completion,
- );
- });
-
- it('should return the correct multiplier for gpt-5.4-pro', () => {
- expect(getMultiplier({ model: 'gpt-5.4-pro', tokenType: 'prompt' })).toBe(
- tokenValues['gpt-5.4-pro'].prompt,
- );
- expect(getMultiplier({ model: 'gpt-5.4-pro', tokenType: 'completion' })).toBe(
- tokenValues['gpt-5.4-pro'].completion,
- );
- expect(getMultiplier({ model: 'openai/gpt-5.4-pro', tokenType: 'prompt' })).toBe(
- tokenValues['gpt-5.4-pro'].prompt,
- );
- });
-
it('should return the correct multiplier for gpt-4o', () => {
const valueKey = getValueKey('gpt-4o-2024-08-06');
expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-4o'].prompt);
@@ -609,7 +528,7 @@ describe('AWS Bedrock Model Tests', () => {
const results = awsModels.map((model) => {
const valueKey = getValueKey(model, EModelEndpoint.bedrock);
const multiplier = getMultiplier({ valueKey, tokenType: 'prompt' });
- return tokenValues[valueKey!].prompt && multiplier === tokenValues[valueKey!].prompt;
+ return tokenValues[valueKey].prompt && multiplier === tokenValues[valueKey].prompt;
});
expect(results.every(Boolean)).toBe(true);
});
@@ -618,7 +537,7 @@ describe('AWS Bedrock Model Tests', () => {
const results = awsModels.map((model) => {
const valueKey = getValueKey(model, EModelEndpoint.bedrock);
const multiplier = getMultiplier({ valueKey, tokenType: 'completion' });
- return tokenValues[valueKey!].completion && multiplier === tokenValues[valueKey!].completion;
+ return tokenValues[valueKey].completion && multiplier === tokenValues[valueKey].completion;
});
expect(results.every(Boolean)).toBe(true);
});
@@ -874,7 +793,7 @@ describe('Deepseek Model Tests', () => {
const results = deepseekModels.map((model) => {
const valueKey = getValueKey(model);
const multiplier = getMultiplier({ valueKey, tokenType: 'prompt' });
- return tokenValues[valueKey!].prompt && multiplier === tokenValues[valueKey!].prompt;
+ return tokenValues[valueKey].prompt && multiplier === tokenValues[valueKey].prompt;
});
expect(results.every(Boolean)).toBe(true);
});
@@ -883,7 +802,7 @@ describe('Deepseek Model Tests', () => {
const results = deepseekModels.map((model) => {
const valueKey = getValueKey(model);
const multiplier = getMultiplier({ valueKey, tokenType: 'completion' });
- return tokenValues[valueKey!].completion && multiplier === tokenValues[valueKey!].completion;
+ return tokenValues[valueKey].completion && multiplier === tokenValues[valueKey].completion;
});
expect(results.every(Boolean)).toBe(true);
});
@@ -893,7 +812,7 @@ describe('Deepseek Model Tests', () => {
const valueKey = getValueKey(model);
expect(valueKey).toBe(model);
const multiplier = getMultiplier({ valueKey, tokenType: 'prompt' });
- const result = tokenValues[valueKey!].prompt && multiplier === tokenValues[valueKey!].prompt;
+ const result = tokenValues[valueKey].prompt && multiplier === tokenValues[valueKey].prompt;
expect(result).toBe(true);
});
@@ -1358,7 +1277,6 @@ describe('getCacheMultiplier', () => {
it('should return null if cacheType is provided but not found in cacheTokenValues', () => {
expect(
- // @ts-expect-error: intentionally passing invalid cacheType to test error handling
getCacheMultiplier({ valueKey: 'claude-3-5-sonnet', cacheType: 'unknownType' }),
).toBeNull();
});
@@ -1408,73 +1326,6 @@ describe('getCacheMultiplier', () => {
).toBeNull();
});
- it('should return correct cache multipliers for OpenAI models', () => {
- const openaiCacheModels = [
- 'gpt-4o',
- 'gpt-4o-mini',
- 'gpt-4.1',
- 'gpt-4.1-mini',
- 'gpt-4.1-nano',
- 'gpt-5',
- 'gpt-5.1',
- 'gpt-5.2',
- 'gpt-5.3',
- 'gpt-5.4',
- 'gpt-5-mini',
- 'gpt-5-nano',
- 'o1',
- 'o1-mini',
- 'o1-preview',
- 'o3',
- 'o3-mini',
- 'o4-mini',
- ];
-
- for (const model of openaiCacheModels) {
- expect(getCacheMultiplier({ model, cacheType: 'write' })).toBe(cacheTokenValues[model].write);
- expect(getCacheMultiplier({ model, cacheType: 'read' })).toBe(cacheTokenValues[model].read);
- }
- });
-
- it('should return correct cache multipliers for OpenAI dated variants', () => {
- expect(getCacheMultiplier({ model: 'gpt-4o-2024-08-06', cacheType: 'read' })).toBe(
- cacheTokenValues['gpt-4o'].read,
- );
- expect(getCacheMultiplier({ model: 'gpt-4.1-2026-01-01', cacheType: 'read' })).toBe(
- cacheTokenValues['gpt-4.1'].read,
- );
- expect(getCacheMultiplier({ model: 'gpt-5.3-codex', cacheType: 'read' })).toBe(
- cacheTokenValues['gpt-5.3'].read,
- );
- expect(getCacheMultiplier({ model: 'openai/gpt-5.3', cacheType: 'write' })).toBe(
- cacheTokenValues['gpt-5.3'].write,
- );
- });
-
- it('should return null for pro models that do not support caching', () => {
- expect(getCacheMultiplier({ model: 'gpt-5-pro', cacheType: 'read' })).toBeNull();
- expect(getCacheMultiplier({ model: 'gpt-5-pro', cacheType: 'write' })).toBeNull();
- expect(getCacheMultiplier({ model: 'gpt-5.2-pro', cacheType: 'read' })).toBeNull();
- expect(getCacheMultiplier({ model: 'gpt-5.2-pro', cacheType: 'write' })).toBeNull();
- expect(getCacheMultiplier({ model: 'gpt-5.4-pro', cacheType: 'read' })).toBeNull();
- expect(getCacheMultiplier({ model: 'gpt-5.4-pro', cacheType: 'write' })).toBeNull();
- });
-
- it('should have consistent 10% cache read pricing for gpt-5.x models', () => {
- const gpt5CacheModels = [
- 'gpt-5',
- 'gpt-5.1',
- 'gpt-5.2',
- 'gpt-5.3',
- 'gpt-5.4',
- 'gpt-5-mini',
- 'gpt-5-nano',
- ];
- for (const model of gpt5CacheModels) {
- expect(cacheTokenValues[model].read).toBeCloseTo(cacheTokenValues[model].write * 0.1, 10);
- }
- });
-
it('should handle models with "bedrock/" prefix', () => {
expect(
getCacheMultiplier({
@@ -1494,9 +1345,6 @@ describe('getCacheMultiplier', () => {
describe('Google Model Tests', () => {
const googleModels = [
'gemini-3',
- 'gemini-3.1-pro-preview',
- 'gemini-3.1-pro-preview-customtools',
- 'gemini-3.1-flash-lite-preview',
'gemini-2.5-pro',
'gemini-2.5-flash',
'gemini-2.5-flash-lite',
@@ -1533,17 +1381,14 @@ describe('Google Model Tests', () => {
});
results.forEach(({ valueKey, promptRate, completionRate }) => {
- expect(promptRate).toBe(tokenValues[valueKey!].prompt);
- expect(completionRate).toBe(tokenValues[valueKey!].completion);
+ expect(promptRate).toBe(tokenValues[valueKey].prompt);
+ expect(completionRate).toBe(tokenValues[valueKey].completion);
});
});
it('should map to the correct model keys', () => {
const expected = {
'gemini-3': 'gemini-3',
- 'gemini-3.1-pro-preview': 'gemini-3.1',
- 'gemini-3.1-pro-preview-customtools': 'gemini-3.1',
- 'gemini-3.1-flash-lite-preview': 'gemini-3.1-flash-lite',
'gemini-2.5-pro': 'gemini-2.5-pro',
'gemini-2.5-flash': 'gemini-2.5-flash',
'gemini-2.5-flash-lite': 'gemini-2.5-flash-lite',
@@ -1587,190 +1432,6 @@ describe('Google Model Tests', () => {
).toBe(tokenValues[expected].completion);
});
});
-
- it('should return correct prompt and completion rates for Gemini 3.1', () => {
- expect(
- getMultiplier({
- model: 'gemini-3.1-pro-preview',
- tokenType: 'prompt',
- endpoint: EModelEndpoint.google,
- }),
- ).toBe(tokenValues['gemini-3.1'].prompt);
- expect(
- getMultiplier({
- model: 'gemini-3.1-pro-preview',
- tokenType: 'completion',
- endpoint: EModelEndpoint.google,
- }),
- ).toBe(tokenValues['gemini-3.1'].completion);
- expect(
- getMultiplier({
- model: 'gemini-3.1-pro-preview-customtools',
- tokenType: 'prompt',
- endpoint: EModelEndpoint.google,
- }),
- ).toBe(tokenValues['gemini-3.1'].prompt);
- expect(
- getMultiplier({
- model: 'gemini-3.1-pro-preview-customtools',
- tokenType: 'completion',
- endpoint: EModelEndpoint.google,
- }),
- ).toBe(tokenValues['gemini-3.1'].completion);
- });
-
- it('should return correct cache rates for Gemini 3.1', () => {
- ['gemini-3.1-pro-preview', 'gemini-3.1-pro-preview-customtools'].forEach((model) => {
- expect(getCacheMultiplier({ model, cacheType: 'write' })).toBe(
- cacheTokenValues['gemini-3.1'].write,
- );
- expect(getCacheMultiplier({ model, cacheType: 'read' })).toBe(
- cacheTokenValues['gemini-3.1'].read,
- );
- });
- });
-
- it('should return correct rates for Gemini 3.1 Flash-Lite', () => {
- const model = 'gemini-3.1-flash-lite-preview';
- expect(getMultiplier({ model, tokenType: 'prompt', endpoint: EModelEndpoint.google })).toBe(
- tokenValues['gemini-3.1-flash-lite'].prompt,
- );
- expect(getMultiplier({ model, tokenType: 'completion', endpoint: EModelEndpoint.google })).toBe(
- tokenValues['gemini-3.1-flash-lite'].completion,
- );
- expect(getCacheMultiplier({ model, cacheType: 'write' })).toBe(
- cacheTokenValues['gemini-3.1-flash-lite'].write,
- );
- expect(getCacheMultiplier({ model, cacheType: 'read' })).toBe(
- cacheTokenValues['gemini-3.1-flash-lite'].read,
- );
- });
-});
-
-describe('Gemini 3.1 Premium Token Pricing', () => {
- const premiumKey = 'gemini-3.1';
- const premiumEntry = premiumTokenValues[premiumKey];
- const { threshold } = premiumEntry;
- const belowThreshold = threshold - 1;
- const aboveThreshold = threshold + 1;
- const wellAboveThreshold = threshold * 2;
-
- it('should have premium pricing defined for gemini-3.1', () => {
- expect(premiumEntry).toBeDefined();
- expect(premiumEntry.threshold).toBeDefined();
- expect(premiumEntry.prompt).toBeDefined();
- expect(premiumEntry.completion).toBeDefined();
- expect(premiumEntry.prompt).toBeGreaterThan(tokenValues[premiumKey].prompt);
- expect(premiumEntry.completion).toBeGreaterThan(tokenValues[premiumKey].completion);
- });
-
- it('should return null from getPremiumRate when inputTokenCount is below or at threshold', () => {
- expect(getPremiumRate(premiumKey, 'prompt', belowThreshold)).toBeNull();
- expect(getPremiumRate(premiumKey, 'completion', belowThreshold)).toBeNull();
- expect(getPremiumRate(premiumKey, 'prompt', threshold)).toBeNull();
- });
-
- it('should return premium rate from getPremiumRate when inputTokenCount exceeds threshold', () => {
- expect(getPremiumRate(premiumKey, 'prompt', aboveThreshold)).toBe(premiumEntry.prompt);
- expect(getPremiumRate(premiumKey, 'completion', aboveThreshold)).toBe(premiumEntry.completion);
- expect(getPremiumRate(premiumKey, 'prompt', wellAboveThreshold)).toBe(premiumEntry.prompt);
- });
-
- it('should return null from getPremiumRate when inputTokenCount is undefined or null', () => {
- expect(getPremiumRate(premiumKey, 'prompt', undefined)).toBeNull();
- expect(getPremiumRate(premiumKey, 'prompt', null)).toBeNull();
- });
-
- it('should return standard rate from getMultiplier when inputTokenCount is below threshold', () => {
- expect(
- getMultiplier({
- model: 'gemini-3.1-pro-preview',
- tokenType: 'prompt',
- inputTokenCount: belowThreshold,
- }),
- ).toBe(tokenValues[premiumKey].prompt);
- expect(
- getMultiplier({
- model: 'gemini-3.1-pro-preview',
- tokenType: 'completion',
- inputTokenCount: belowThreshold,
- }),
- ).toBe(tokenValues[premiumKey].completion);
- });
-
- it('should return premium rate from getMultiplier when inputTokenCount exceeds threshold', () => {
- expect(
- getMultiplier({
- model: 'gemini-3.1-pro-preview',
- tokenType: 'prompt',
- inputTokenCount: aboveThreshold,
- }),
- ).toBe(premiumEntry.prompt);
- expect(
- getMultiplier({
- model: 'gemini-3.1-pro-preview',
- tokenType: 'completion',
- inputTokenCount: aboveThreshold,
- }),
- ).toBe(premiumEntry.completion);
- });
-
- it('should return standard rate from getMultiplier when inputTokenCount is exactly at threshold', () => {
- expect(
- getMultiplier({
- model: 'gemini-3.1-pro-preview',
- tokenType: 'prompt',
- inputTokenCount: threshold,
- }),
- ).toBe(tokenValues[premiumKey].prompt);
- });
-
- it('should apply premium pricing to customtools variant above threshold', () => {
- expect(
- getMultiplier({
- model: 'gemini-3.1-pro-preview-customtools',
- tokenType: 'prompt',
- inputTokenCount: aboveThreshold,
- }),
- ).toBe(premiumEntry.prompt);
- expect(
- getMultiplier({
- model: 'gemini-3.1-pro-preview-customtools',
- tokenType: 'completion',
- inputTokenCount: aboveThreshold,
- }),
- ).toBe(premiumEntry.completion);
- });
-
- it('should use standard rate when inputTokenCount is not provided', () => {
- expect(getMultiplier({ model: 'gemini-3.1-pro-preview', tokenType: 'prompt' })).toBe(
- tokenValues[premiumKey].prompt,
- );
- expect(getMultiplier({ model: 'gemini-3.1-pro-preview', tokenType: 'completion' })).toBe(
- tokenValues[premiumKey].completion,
- );
- });
-
- it('should apply premium pricing through getMultiplier with valueKey path', () => {
- const valueKey = getValueKey('gemini-3.1-pro-preview');
- expect(valueKey).toBe(premiumKey);
- expect(getMultiplier({ valueKey, tokenType: 'prompt', inputTokenCount: aboveThreshold })).toBe(
- premiumEntry.prompt,
- );
- expect(
- getMultiplier({ valueKey, tokenType: 'completion', inputTokenCount: aboveThreshold }),
- ).toBe(premiumEntry.completion);
- });
-
- it('should apply standard pricing through getMultiplier with valueKey path when below threshold', () => {
- const valueKey = getValueKey('gemini-3.1-pro-preview');
- expect(getMultiplier({ valueKey, tokenType: 'prompt', inputTokenCount: belowThreshold })).toBe(
- tokenValues[premiumKey].prompt,
- );
- expect(
- getMultiplier({ valueKey, tokenType: 'completion', inputTokenCount: belowThreshold }),
- ).toBe(tokenValues[premiumKey].completion);
- });
});
describe('Grok Model Tests - Pricing', () => {
@@ -2314,7 +1975,7 @@ describe('Premium Token Pricing', () => {
it('should return null from getPremiumRate when inputTokenCount is undefined or null', () => {
expect(getPremiumRate(premiumModel, 'prompt', undefined)).toBeNull();
- expect(getPremiumRate(premiumModel, 'prompt', undefined)).toBeNull();
+ expect(getPremiumRate(premiumModel, 'prompt', null)).toBeNull();
});
it('should return null from getPremiumRate for models without premium pricing', () => {
@@ -2416,5 +2077,118 @@ describe('Premium Token Pricing', () => {
});
});
-// Cross-package sync validation tests (tokens.ts ↔ tx.ts) moved to
-// packages/api tests since they require maxTokensMap from @librechat/api.
+describe('tokens.ts and tx.js sync validation', () => {
+ it('should resolve all models in maxTokensMap to pricing via getValueKey', () => {
+ const tokensKeys = Object.keys(maxTokensMap[EModelEndpoint.openAI]);
+ const txKeys = Object.keys(tokenValues);
+
+ const unresolved = [];
+
+ tokensKeys.forEach((key) => {
+ // Skip legacy token size mappings (e.g., '4k', '8k', '16k', '32k')
+ if (/^\d+k$/.test(key)) return;
+
+ // Skip generic pattern keys (end with '-' or ':')
+ if (key.endsWith('-') || key.endsWith(':')) return;
+
+ // Try to resolve via getValueKey
+ const resolvedKey = getValueKey(key);
+
+ // If it resolves and the resolved key has pricing, success
+ if (resolvedKey && txKeys.includes(resolvedKey)) return;
+
+ // If it resolves to a legacy key (4k, 8k, etc), also OK
+ if (resolvedKey && /^\d+k$/.test(resolvedKey)) return;
+
+ // If we get here, this model can't get pricing - flag it
+ unresolved.push({
+ key,
+ resolvedKey: resolvedKey || 'undefined',
+ context: maxTokensMap[EModelEndpoint.openAI][key],
+ });
+ });
+
+ if (unresolved.length > 0) {
+ console.log('\nModels that cannot resolve to pricing via getValueKey:');
+ unresolved.forEach(({ key, resolvedKey, context }) => {
+ console.log(` - '${key}' → '${resolvedKey}' (context: ${context})`);
+ });
+ }
+
+ expect(unresolved).toEqual([]);
+ });
+
+ it('should not have redundant dated variants with same pricing and context as base model', () => {
+ const txKeys = Object.keys(tokenValues);
+ const redundant = [];
+
+ txKeys.forEach((key) => {
+ // Check if this is a dated variant (ends with -YYYY-MM-DD)
+ if (key.match(/.*-\d{4}-\d{2}-\d{2}$/)) {
+ const baseKey = key.replace(/-\d{4}-\d{2}-\d{2}$/, '');
+
+ if (txKeys.includes(baseKey)) {
+ const variantPricing = tokenValues[key];
+ const basePricing = tokenValues[baseKey];
+ const variantContext = maxTokensMap[EModelEndpoint.openAI][key];
+ const baseContext = maxTokensMap[EModelEndpoint.openAI][baseKey];
+
+ const samePricing =
+ variantPricing.prompt === basePricing.prompt &&
+ variantPricing.completion === basePricing.completion;
+ const sameContext = variantContext === baseContext;
+
+ if (samePricing && sameContext) {
+ redundant.push({
+ key,
+ baseKey,
+ pricing: `${variantPricing.prompt}/${variantPricing.completion}`,
+ context: variantContext,
+ });
+ }
+ }
+ }
+ });
+
+ if (redundant.length > 0) {
+ console.log('\nRedundant dated variants found (same pricing and context as base):');
+ redundant.forEach(({ key, baseKey, pricing, context }) => {
+ console.log(` - '${key}' → '${baseKey}' (pricing: ${pricing}, context: ${context})`);
+ console.log(` Can be removed - pattern matching will handle it`);
+ });
+ }
+
+ expect(redundant).toEqual([]);
+ });
+
+ it('should have context windows in tokens.ts for all models with pricing in tx.js (openAI catch-all)', () => {
+ const txKeys = Object.keys(tokenValues);
+ const missingContext = [];
+
+ txKeys.forEach((key) => {
+ // Skip legacy token size mappings (4k, 8k, 16k, 32k)
+ if (/^\d+k$/.test(key)) return;
+
+ // Check if this model has a context window defined
+ const context = maxTokensMap[EModelEndpoint.openAI][key];
+
+ if (!context) {
+ const pricing = tokenValues[key];
+ missingContext.push({
+ key,
+ pricing: `${pricing.prompt}/${pricing.completion}`,
+ });
+ }
+ });
+
+ if (missingContext.length > 0) {
+ console.log('\nModels with pricing but missing context in tokens.ts:');
+ missingContext.forEach(({ key, pricing }) => {
+ console.log(` - '${key}' (pricing: ${pricing})`);
+ console.log(` Add to tokens.ts openAIModels/bedrockModels/etc.`);
+ });
+ }
+
+ expect(missingContext).toEqual([]);
+ });
+});
diff --git a/api/models/userMethods.js b/api/models/userMethods.js
new file mode 100644
index 0000000000..b57b24e641
--- /dev/null
+++ b/api/models/userMethods.js
@@ -0,0 +1,31 @@
+const bcrypt = require('bcryptjs');
+
+/**
+ * Compares the provided password with the user's password.
+ *
+ * @param {IUser} user - The user to compare the password for.
+ * @param {string} candidatePassword - The password to test against the user's password.
+ * @returns {Promise} A promise that resolves to a boolean indicating if the password matches.
+ */
+const comparePassword = async (user, candidatePassword) => {
+ if (!user) {
+ throw new Error('No user provided');
+ }
+
+ if (!user.password) {
+ throw new Error('No password, likely an email first registered via Social/OIDC login');
+ }
+
+ return new Promise((resolve, reject) => {
+ bcrypt.compare(candidatePassword, user.password, (err, isMatch) => {
+ if (err) {
+ reject(err);
+ }
+ resolve(isMatch);
+ });
+ });
+};
+
+module.exports = {
+ comparePassword,
+};
diff --git a/api/package.json b/api/package.json
index 86b0f22c0b..1c40ddb337 100644
--- a/api/package.json
+++ b/api/package.json
@@ -1,6 +1,6 @@
{
"name": "@librechat/backend",
- "version": "v0.8.4",
+ "version": "v0.8.3-rc1",
"description": "",
"scripts": {
"start": "echo 'please run this from the root directory'",
@@ -35,7 +35,7 @@
"homepage": "https://librechat.ai",
"dependencies": {
"@anthropic-ai/vertex-sdk": "^0.14.3",
- "@aws-sdk/client-bedrock-runtime": "^3.1013.0",
+ "@aws-sdk/client-bedrock-runtime": "^3.980.0",
"@aws-sdk/client-s3": "^3.980.0",
"@aws-sdk/s3-request-presigner": "^3.758.0",
"@azure/identity": "^4.7.0",
@@ -44,15 +44,14 @@
"@google/genai": "^1.19.0",
"@keyv/redis": "^4.3.3",
"@langchain/core": "^0.3.80",
- "@librechat/agents": "^3.1.63",
+ "@librechat/agents": "^3.1.50",
"@librechat/api": "*",
"@librechat/data-schemas": "*",
"@microsoft/microsoft-graph-client": "^3.0.7",
- "@modelcontextprotocol/sdk": "^1.27.1",
+ "@modelcontextprotocol/sdk": "^1.26.0",
"@node-saml/passport-saml": "^5.1.0",
"@smithy/node-http-handler": "^4.4.5",
- "ai-tokenizer": "^1.0.6",
- "axios": "1.13.6",
+ "axios": "^1.13.5",
"bcryptjs": "^2.4.3",
"compression": "^1.8.1",
"connect-redis": "^8.1.0",
@@ -64,13 +63,13 @@
"eventsource": "^3.0.2",
"express": "^5.2.1",
"express-mongo-sanitize": "^2.2.0",
- "express-rate-limit": "^8.3.0",
+ "express-rate-limit": "^8.2.1",
"express-session": "^1.18.2",
"express-static-gzip": "^2.2.0",
- "file-type": "^21.3.2",
+ "file-type": "^18.7.0",
"firebase": "^11.0.2",
"form-data": "^4.0.4",
- "handlebars": "^4.7.9",
+ "handlebars": "^4.7.7",
"https-proxy-agent": "^7.0.6",
"ioredis": "^5.3.2",
"js-yaml": "^4.1.1",
@@ -81,17 +80,16 @@
"klona": "^2.0.6",
"librechat-data-provider": "*",
"lodash": "^4.17.23",
- "mammoth": "^1.11.0",
"mathjs": "^15.1.0",
"meilisearch": "^0.38.0",
"memorystore": "^1.6.7",
"mime": "^3.0.0",
"module-alias": "^2.2.3",
"mongoose": "^8.12.1",
- "multer": "^2.1.1",
+ "multer": "^2.0.2",
"nanoid": "^3.3.7",
"node-fetch": "^2.7.0",
- "nodemailer": "^8.0.4",
+ "nodemailer": "^7.0.11",
"ollama": "^0.5.0",
"openai": "5.8.2",
"openid-client": "^6.5.0",
@@ -104,16 +102,14 @@
"passport-jwt": "^4.0.1",
"passport-ldapauth": "^3.0.1",
"passport-local": "^1.0.0",
- "pdfjs-dist": "^5.4.624",
"rate-limit-redis": "^4.2.0",
"sharp": "^0.33.5",
+ "tiktoken": "^1.0.15",
"traverse": "^0.6.7",
"ua-parser-js": "^1.0.36",
- "undici": "^7.24.1",
+ "undici": "^7.18.2",
"winston": "^3.11.0",
"winston-daily-rotate-file": "^5.0.0",
- "xlsx": "https://cdn.sheetjs.com/xlsx-0.20.3/xlsx-0.20.3.tgz",
- "yauzl": "^3.2.1",
"zod": "^3.22.4"
},
"devDependencies": {
diff --git a/api/server/cleanup.js b/api/server/cleanup.js
index c27814292d..c482a2267e 100644
--- a/api/server/cleanup.js
+++ b/api/server/cleanup.js
@@ -35,6 +35,7 @@ const graphPropsToClean = [
'tools',
'signal',
'config',
+ 'agentContexts',
'messages',
'contentData',
'stepKeyIds',
@@ -123,6 +124,9 @@ function disposeClient(client) {
if (client.maxContextTokens) {
client.maxContextTokens = null;
}
+ if (client.contextStrategy) {
+ client.contextStrategy = null;
+ }
if (client.currentDateString) {
client.currentDateString = null;
}
@@ -273,16 +277,7 @@ function disposeClient(client) {
if (client.run) {
if (client.run.Graph) {
- if (typeof client.run.Graph.clearHeavyState === 'function') {
- client.run.Graph.clearHeavyState();
- } else {
- client.run.Graph.resetValues();
- }
-
- if (client.run.Graph.agentContexts) {
- client.run.Graph.agentContexts.clear();
- client.run.Graph.agentContexts = null;
- }
+ client.run.Graph.resetValues();
graphPropsToClean.forEach((prop) => {
if (client.run.Graph[prop] !== undefined) {
diff --git a/api/server/controllers/AuthController.js b/api/server/controllers/AuthController.js
index eb44feffa4..588391b535 100644
--- a/api/server/controllers/AuthController.js
+++ b/api/server/controllers/AuthController.js
@@ -18,7 +18,7 @@ const {
findUser,
} = require('~/models');
const { getGraphApiToken } = require('~/server/services/GraphTokenService');
-const { getOpenIdConfig, getOpenIdEmail } = require('~/strategies');
+const { getOpenIdConfig } = require('~/strategies');
const registrationController = async (req, res) => {
try {
@@ -87,7 +87,7 @@ const refreshController = async (req, res) => {
const claims = tokenset.claims();
const { user, error, migration } = await findOpenIDUser({
findUser,
- email: getOpenIdEmail(claims),
+ email: claims.email,
openidId: claims.sub,
idOnTheSource: claims.oid,
strategyName: 'refreshController',
@@ -119,8 +119,14 @@ const refreshController = async (req, res) => {
const token = setOpenIDAuthTokens(tokenset, req, res, user._id.toString(), refreshToken);
- const { password: _pw, __v: _v, totpSecret: _ts, backupCodes: _bc, ...safeUser } = user;
- return res.status(200).send({ token, user: safeUser });
+ user.federatedTokens = {
+ access_token: tokenset.access_token,
+ id_token: tokenset.id_token,
+ refresh_token: refreshToken,
+ expires_at: claims.exp,
+ };
+
+ return res.status(200).send({ token, user });
} catch (error) {
logger.error('[refreshController] OpenID token refresh error', error);
return res.status(403).send('Invalid OpenID refresh token');
@@ -190,6 +196,15 @@ const graphTokenController = async (req, res) => {
});
}
+ // Extract access token from Authorization header
+ const authHeader = req.headers.authorization;
+ if (!authHeader || !authHeader.startsWith('Bearer ')) {
+ return res.status(401).json({
+ message: 'Valid authorization token required',
+ });
+ }
+
+ // Get scopes from query parameters
const scopes = req.query.scopes;
if (!scopes) {
return res.status(400).json({
@@ -197,13 +212,7 @@ const graphTokenController = async (req, res) => {
});
}
- const accessToken = req.user.federatedTokens?.access_token;
- if (!accessToken) {
- return res.status(401).json({
- message: 'No federated access token available for token exchange',
- });
- }
-
+ const accessToken = authHeader.substring(7); // Remove 'Bearer ' prefix
const tokenResponse = await getGraphApiToken(req.user, accessToken, scopes);
res.json(tokenResponse);
diff --git a/api/server/controllers/AuthController.spec.js b/api/server/controllers/AuthController.spec.js
deleted file mode 100644
index 964947def9..0000000000
--- a/api/server/controllers/AuthController.spec.js
+++ /dev/null
@@ -1,318 +0,0 @@
-jest.mock('@librechat/data-schemas', () => ({
- logger: { error: jest.fn(), debug: jest.fn(), warn: jest.fn(), info: jest.fn() },
-}));
-jest.mock('~/server/services/GraphTokenService', () => ({
- getGraphApiToken: jest.fn(),
-}));
-jest.mock('~/server/services/AuthService', () => ({
- requestPasswordReset: jest.fn(),
- setOpenIDAuthTokens: jest.fn(),
- resetPassword: jest.fn(),
- setAuthTokens: jest.fn(),
- registerUser: jest.fn(),
-}));
-jest.mock('~/strategies', () => ({ getOpenIdConfig: jest.fn(), getOpenIdEmail: jest.fn() }));
-jest.mock('openid-client', () => ({ refreshTokenGrant: jest.fn() }));
-jest.mock('~/models', () => ({
- deleteAllUserSessions: jest.fn(),
- getUserById: jest.fn(),
- findSession: jest.fn(),
- updateUser: jest.fn(),
- findUser: jest.fn(),
-}));
-jest.mock('@librechat/api', () => ({
- isEnabled: jest.fn(),
- findOpenIDUser: jest.fn(),
-}));
-
-const openIdClient = require('openid-client');
-const { isEnabled, findOpenIDUser } = require('@librechat/api');
-const { graphTokenController, refreshController } = require('./AuthController');
-const { getGraphApiToken } = require('~/server/services/GraphTokenService');
-const { setOpenIDAuthTokens } = require('~/server/services/AuthService');
-const { getOpenIdConfig, getOpenIdEmail } = require('~/strategies');
-const { updateUser } = require('~/models');
-
-describe('graphTokenController', () => {
- let req, res;
-
- beforeEach(() => {
- jest.clearAllMocks();
- isEnabled.mockReturnValue(true);
-
- req = {
- user: {
- openidId: 'oid-123',
- provider: 'openid',
- federatedTokens: {
- access_token: 'federated-access-token',
- id_token: 'federated-id-token',
- },
- },
- headers: { authorization: 'Bearer app-jwt-which-is-id-token' },
- query: { scopes: 'https://graph.microsoft.com/.default' },
- };
-
- res = {
- status: jest.fn().mockReturnThis(),
- json: jest.fn(),
- };
-
- getGraphApiToken.mockResolvedValue({
- access_token: 'graph-access-token',
- token_type: 'Bearer',
- expires_in: 3600,
- });
- });
-
- it('should pass federatedTokens.access_token as OBO assertion, not the auth header bearer token', async () => {
- await graphTokenController(req, res);
-
- expect(getGraphApiToken).toHaveBeenCalledWith(
- req.user,
- 'federated-access-token',
- 'https://graph.microsoft.com/.default',
- );
- expect(getGraphApiToken).not.toHaveBeenCalledWith(
- expect.anything(),
- 'app-jwt-which-is-id-token',
- expect.anything(),
- );
- });
-
- it('should return the graph token response on success', async () => {
- await graphTokenController(req, res);
-
- expect(res.json).toHaveBeenCalledWith({
- access_token: 'graph-access-token',
- token_type: 'Bearer',
- expires_in: 3600,
- });
- });
-
- it('should return 403 when user is not authenticated via Entra ID', async () => {
- req.user.provider = 'google';
- req.user.openidId = undefined;
-
- await graphTokenController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(403);
- expect(getGraphApiToken).not.toHaveBeenCalled();
- });
-
- it('should return 403 when OPENID_REUSE_TOKENS is not enabled', async () => {
- isEnabled.mockReturnValue(false);
-
- await graphTokenController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(403);
- expect(getGraphApiToken).not.toHaveBeenCalled();
- });
-
- it('should return 400 when scopes query param is missing', async () => {
- req.query.scopes = undefined;
-
- await graphTokenController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(400);
- expect(getGraphApiToken).not.toHaveBeenCalled();
- });
-
- it('should return 401 when federatedTokens.access_token is missing', async () => {
- req.user.federatedTokens = {};
-
- await graphTokenController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(401);
- expect(getGraphApiToken).not.toHaveBeenCalled();
- });
-
- it('should return 401 when federatedTokens is absent entirely', async () => {
- req.user.federatedTokens = undefined;
-
- await graphTokenController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(401);
- expect(getGraphApiToken).not.toHaveBeenCalled();
- });
-
- it('should return 500 when getGraphApiToken throws', async () => {
- getGraphApiToken.mockRejectedValue(new Error('OBO exchange failed'));
-
- await graphTokenController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(500);
- expect(res.json).toHaveBeenCalledWith({
- message: 'Failed to obtain Microsoft Graph token',
- });
- });
-});
-
-describe('refreshController – OpenID path', () => {
- const mockTokenset = {
- claims: jest.fn(),
- access_token: 'new-access',
- id_token: 'new-id',
- refresh_token: 'new-refresh',
- };
-
- const baseClaims = {
- sub: 'oidc-sub-123',
- oid: 'oid-456',
- email: 'user@example.com',
- exp: 9999999999,
- };
-
- const defaultUser = {
- _id: 'user-db-id',
- email: baseClaims.email,
- openidId: baseClaims.sub,
- password: '$2b$10$hashedpassword',
- __v: 0,
- totpSecret: 'encrypted-totp-secret',
- backupCodes: ['hashed-code-1', 'hashed-code-2'],
- };
-
- let req, res;
-
- beforeEach(() => {
- jest.clearAllMocks();
-
- isEnabled.mockReturnValue(true);
- getOpenIdConfig.mockReturnValue({ some: 'config' });
- openIdClient.refreshTokenGrant.mockResolvedValue(mockTokenset);
- mockTokenset.claims.mockReturnValue(baseClaims);
- getOpenIdEmail.mockReturnValue(baseClaims.email);
- setOpenIDAuthTokens.mockReturnValue('new-app-token');
- findOpenIDUser.mockResolvedValue({ user: { ...defaultUser }, error: null, migration: false });
- updateUser.mockResolvedValue({});
-
- req = {
- headers: { cookie: 'token_provider=openid; refreshToken=stored-refresh' },
- session: {},
- };
-
- res = {
- status: jest.fn().mockReturnThis(),
- send: jest.fn().mockReturnThis(),
- redirect: jest.fn(),
- };
- });
-
- it('should call getOpenIdEmail with token claims and use result for findOpenIDUser', async () => {
- await refreshController(req, res);
-
- expect(getOpenIdEmail).toHaveBeenCalledWith(baseClaims);
- expect(findOpenIDUser).toHaveBeenCalledWith(
- expect.objectContaining({ email: baseClaims.email }),
- );
- expect(res.status).toHaveBeenCalledWith(200);
- });
-
- it('should use OPENID_EMAIL_CLAIM-resolved value when claim is present in token', async () => {
- const claimsWithUpn = { ...baseClaims, upn: 'user@corp.example.com' };
- mockTokenset.claims.mockReturnValue(claimsWithUpn);
- getOpenIdEmail.mockReturnValue('user@corp.example.com');
-
- const user = {
- _id: 'user-db-id',
- email: 'user@corp.example.com',
- openidId: baseClaims.sub,
- };
- findOpenIDUser.mockResolvedValue({ user, error: null, migration: false });
-
- await refreshController(req, res);
-
- expect(getOpenIdEmail).toHaveBeenCalledWith(claimsWithUpn);
- expect(findOpenIDUser).toHaveBeenCalledWith(
- expect.objectContaining({ email: 'user@corp.example.com' }),
- );
- expect(res.status).toHaveBeenCalledWith(200);
- });
-
- it('should fall back to claims.email when configured claim is absent from token claims', async () => {
- getOpenIdEmail.mockReturnValue(baseClaims.email);
-
- await refreshController(req, res);
-
- expect(findOpenIDUser).toHaveBeenCalledWith(
- expect.objectContaining({ email: baseClaims.email }),
- );
- });
-
- it('should not expose sensitive fields or federatedTokens in refresh response', async () => {
- await refreshController(req, res);
-
- const sentPayload = res.send.mock.calls[0][0];
- expect(sentPayload).toEqual({
- token: 'new-app-token',
- user: expect.objectContaining({
- _id: 'user-db-id',
- email: baseClaims.email,
- openidId: baseClaims.sub,
- }),
- });
- expect(sentPayload.user).not.toHaveProperty('federatedTokens');
- expect(sentPayload.user).not.toHaveProperty('password');
- expect(sentPayload.user).not.toHaveProperty('totpSecret');
- expect(sentPayload.user).not.toHaveProperty('backupCodes');
- expect(sentPayload.user).not.toHaveProperty('__v');
- });
-
- it('should update openidId when migration is triggered on refresh', async () => {
- const user = { _id: 'user-db-id', email: baseClaims.email, openidId: null };
- findOpenIDUser.mockResolvedValue({ user, error: null, migration: true });
-
- await refreshController(req, res);
-
- expect(updateUser).toHaveBeenCalledWith(
- 'user-db-id',
- expect.objectContaining({ provider: 'openid', openidId: baseClaims.sub }),
- );
- expect(res.status).toHaveBeenCalledWith(200);
- });
-
- it('should return 401 and redirect to /login when findOpenIDUser returns no user', async () => {
- findOpenIDUser.mockResolvedValue({ user: null, error: null, migration: false });
-
- await refreshController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(401);
- expect(res.redirect).toHaveBeenCalledWith('/login');
- });
-
- it('should return 401 and redirect when findOpenIDUser returns an error', async () => {
- findOpenIDUser.mockResolvedValue({ user: null, error: 'AUTH_FAILED', migration: false });
-
- await refreshController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(401);
- expect(res.redirect).toHaveBeenCalledWith('/login');
- });
-
- it('should skip OpenID path when token_provider is not openid', async () => {
- req.headers.cookie = 'token_provider=local; refreshToken=some-token';
-
- await refreshController(req, res);
-
- expect(openIdClient.refreshTokenGrant).not.toHaveBeenCalled();
- });
-
- it('should skip OpenID path when OPENID_REUSE_TOKENS is disabled', async () => {
- isEnabled.mockReturnValue(false);
-
- await refreshController(req, res);
-
- expect(openIdClient.refreshTokenGrant).not.toHaveBeenCalled();
- });
-
- it('should return 200 with token not provided when refresh token is absent', async () => {
- req.headers.cookie = 'token_provider=openid';
- req.session = {};
-
- await refreshController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(200);
- expect(res.send).toHaveBeenCalledWith('Refresh token not provided');
- });
-});
diff --git a/api/server/controllers/Balance.js b/api/server/controllers/Balance.js
index fd9b32e74c..c892a73b0c 100644
--- a/api/server/controllers/Balance.js
+++ b/api/server/controllers/Balance.js
@@ -1,22 +1,24 @@
-const { findBalanceByUser } = require('~/models');
+const { Balance } = require('~/db/models');
async function balanceController(req, res) {
- const balanceData = await findBalanceByUser(req.user.id);
+ const balanceData = await Balance.findOne(
+ { user: req.user.id },
+ '-_id tokenCredits autoRefillEnabled refillIntervalValue refillIntervalUnit lastRefill refillAmount',
+ ).lean();
if (!balanceData) {
return res.status(404).json({ error: 'Balance not found' });
}
- const { _id: _, ...result } = balanceData;
-
- if (!result.autoRefillEnabled) {
- delete result.refillIntervalValue;
- delete result.refillIntervalUnit;
- delete result.lastRefill;
- delete result.refillAmount;
+ // If auto-refill is not enabled, remove auto-refill related fields from the response
+ if (!balanceData.autoRefillEnabled) {
+ delete balanceData.refillIntervalValue;
+ delete balanceData.refillIntervalUnit;
+ delete balanceData.lastRefill;
+ delete balanceData.refillAmount;
}
- res.status(200).json(result);
+ res.status(200).json(balanceData);
}
module.exports = balanceController;
diff --git a/api/server/controllers/ModelController.js b/api/server/controllers/ModelController.js
index 4738d45111..805d9eef27 100644
--- a/api/server/controllers/ModelController.js
+++ b/api/server/controllers/ModelController.js
@@ -1,12 +1,40 @@
const { logger } = require('@librechat/data-schemas');
+const { CacheKeys } = require('librechat-data-provider');
const { loadDefaultModels, loadConfigModels } = require('~/server/services/Config');
+const { getLogStores } = require('~/cache');
-const getModelsConfig = (req) => loadModels(req);
+/**
+ * @param {ServerRequest} req
+ * @returns {Promise} The models config.
+ */
+const getModelsConfig = async (req) => {
+ const cache = getLogStores(CacheKeys.CONFIG_STORE);
+ let modelsConfig = await cache.get(CacheKeys.MODELS_CONFIG);
+ if (!modelsConfig) {
+ modelsConfig = await loadModels(req);
+ }
+ return modelsConfig;
+};
+
+/**
+ * Loads the models from the config.
+ * @param {ServerRequest} req - The Express request object.
+ * @returns {Promise} The models config.
+ */
async function loadModels(req) {
+ const cache = getLogStores(CacheKeys.CONFIG_STORE);
+ const cachedModelsConfig = await cache.get(CacheKeys.MODELS_CONFIG);
+ if (cachedModelsConfig) {
+ return cachedModelsConfig;
+ }
const defaultModelsConfig = await loadDefaultModels(req);
const customModelsConfig = await loadConfigModels(req);
- return { ...defaultModelsConfig, ...customModelsConfig };
+
+ const modelConfig = { ...defaultModelsConfig, ...customModelsConfig };
+
+ await cache.set(CacheKeys.MODELS_CONFIG, modelConfig);
+ return modelConfig;
}
async function modelController(req, res) {
diff --git a/api/server/controllers/PermissionsController.js b/api/server/controllers/PermissionsController.js
index 1f200fce83..51993d083c 100644
--- a/api/server/controllers/PermissionsController.js
+++ b/api/server/controllers/PermissionsController.js
@@ -9,17 +9,22 @@ const { enrichRemoteAgentPrincipals, backfillRemoteAgentPermissions } = require(
const {
bulkUpdateResourcePermissions,
ensureGroupPrincipalExists,
- getResourcePermissionsMap,
- findAccessibleResources,
getEffectivePermissions,
ensurePrincipalExists,
getAvailableRoles,
+ findAccessibleResources,
+ getResourcePermissionsMap,
} = require('~/server/services/PermissionService');
+const {
+ searchPrincipals: searchLocalPrincipals,
+ sortPrincipalsByRelevance,
+ calculateRelevanceScore,
+} = require('~/models');
const {
entraIdPrincipalFeatureEnabled,
searchEntraIdPrincipals,
} = require('~/server/services/GraphApiService');
-const db = require('~/models');
+const { AclEntry, AccessRole } = require('~/db/models');
/**
* Generic controller for resource permission endpoints
@@ -150,18 +155,6 @@ const updateResourcePermissions = async (req, res) => {
grantedBy: userId,
});
- const isAgentResource =
- resourceType === ResourceType.AGENT || resourceType === ResourceType.REMOTE_AGENT;
- const revokedUserIds = results.revoked
- .filter((p) => p.type === PrincipalType.USER && p.id)
- .map((p) => p.id);
-
- if (isAgentResource && revokedUserIds.length > 0) {
- db.removeAgentFromUserFavorites(resourceId, revokedUserIds).catch((err) => {
- logger.error('[removeRevokedAgentFromFavorites] Error cleaning up favorites', err);
- });
- }
-
/** @type {TUpdateResourcePermissionsResponse} */
const response = {
message: 'Permissions updated successfully',
@@ -192,7 +185,8 @@ const getResourcePermissions = async (req, res) => {
const { resourceType, resourceId } = req.params;
validateResourceType(resourceType);
- const results = await db.aggregateAclEntries([
+ // Use aggregation pipeline for efficient single-query data retrieval
+ const results = await AclEntry.aggregate([
// Match ACL entries for this resource
{
$match: {
@@ -288,12 +282,7 @@ const getResourcePermissions = async (req, res) => {
}
if (resourceType === ResourceType.REMOTE_AGENT) {
- const enricherDeps = {
- aggregateAclEntries: db.aggregateAclEntries,
- bulkWriteAclEntries: db.bulkWriteAclEntries,
- findRoleByIdentifier: db.findRoleByIdentifier,
- logger,
- };
+ const enricherDeps = { AclEntry, AccessRole, logger };
const enrichResult = await enrichRemoteAgentPrincipals(enricherDeps, resourceId, principals);
principals = enrichResult.principals;
backfillRemoteAgentPermissions(enricherDeps, resourceId, enrichResult.entriesToBackfill);
@@ -410,7 +399,7 @@ const searchPrincipals = async (req, res) => {
typeFilters = validTypes.length > 0 ? validTypes : null;
}
- const localResults = await db.searchPrincipals(query.trim(), searchLimit, typeFilters);
+ const localResults = await searchLocalPrincipals(query.trim(), searchLimit, typeFilters);
let allPrincipals = [...localResults];
const useEntraId = entraIdPrincipalFeatureEnabled(req.user);
@@ -466,11 +455,10 @@ const searchPrincipals = async (req, res) => {
}
const scoredResults = allPrincipals.map((item) => ({
...item,
- _searchScore: db.calculateRelevanceScore(item, query.trim()),
+ _searchScore: calculateRelevanceScore(item, query.trim()),
}));
- const finalResults = db
- .sortPrincipalsByRelevance(scoredResults)
+ const finalResults = sortPrincipalsByRelevance(scoredResults)
.slice(0, searchLimit)
.map((result) => {
const { _searchScore, ...resultWithoutScore } = result;
diff --git a/api/server/controllers/PluginController.js b/api/server/controllers/PluginController.js
index c5d5c5b888..279ffb15fd 100644
--- a/api/server/controllers/PluginController.js
+++ b/api/server/controllers/PluginController.js
@@ -1,37 +1,61 @@
const { logger } = require('@librechat/data-schemas');
+const { CacheKeys } = require('librechat-data-provider');
const { getToolkitKey, checkPluginAuth, filterUniquePlugins } = require('@librechat/api');
const { getCachedTools, setCachedTools } = require('~/server/services/Config');
const { availableTools, toolkits } = require('~/app/clients/tools');
const { getAppConfig } = require('~/server/services/Config');
+const { getLogStores } = require('~/cache');
const getAvailablePluginsController = async (req, res) => {
try {
- const appConfig = await getAppConfig({ role: req.user?.role, tenantId: req.user?.tenantId });
- const { filteredTools = [], includedTools = [] } = appConfig;
-
- const uniquePlugins = filterUniquePlugins(availableTools);
- const includeSet = new Set(includedTools);
- const filterSet = new Set(filteredTools);
-
- /** includedTools takes precedence — filteredTools ignored when both are set. */
- const plugins = [];
- for (const plugin of uniquePlugins) {
- if (includeSet.size > 0) {
- if (!includeSet.has(plugin.pluginKey)) {
- continue;
- }
- } else if (filterSet.has(plugin.pluginKey)) {
- continue;
- }
- plugins.push(checkPluginAuth(plugin) ? { ...plugin, authenticated: true } : plugin);
+ const cache = getLogStores(CacheKeys.TOOL_CACHE);
+ const cachedPlugins = await cache.get(CacheKeys.PLUGINS);
+ if (cachedPlugins) {
+ res.status(200).json(cachedPlugins);
+ return;
}
+ const appConfig = await getAppConfig({ role: req.user?.role });
+ /** @type {{ filteredTools: string[], includedTools: string[] }} */
+ const { filteredTools = [], includedTools = [] } = appConfig;
+ /** @type {import('@librechat/api').LCManifestTool[]} */
+ const pluginManifest = availableTools;
+
+ const uniquePlugins = filterUniquePlugins(pluginManifest);
+ let authenticatedPlugins = [];
+ for (const plugin of uniquePlugins) {
+ authenticatedPlugins.push(
+ checkPluginAuth(plugin) ? { ...plugin, authenticated: true } : plugin,
+ );
+ }
+
+ let plugins = authenticatedPlugins;
+
+ if (includedTools.length > 0) {
+ plugins = plugins.filter((plugin) => includedTools.includes(plugin.pluginKey));
+ } else {
+ plugins = plugins.filter((plugin) => !filteredTools.includes(plugin.pluginKey));
+ }
+
+ await cache.set(CacheKeys.PLUGINS, plugins);
res.status(200).json(plugins);
} catch (error) {
res.status(500).json({ message: error.message });
}
};
+/**
+ * Retrieves and returns a list of available tools, either from a cache or by reading a plugin manifest file.
+ *
+ * This function first attempts to retrieve the list of tools from a cache. If the tools are not found in the cache,
+ * it reads a plugin manifest file, filters for unique plugins, and determines if each plugin is authenticated.
+ * Only plugins that are marked as available in the application's local state are included in the final list.
+ * The resulting list of tools is then cached and sent to the client.
+ *
+ * @param {object} req - The request object, containing information about the HTTP request.
+ * @param {object} res - The response object, used to send back the desired HTTP response.
+ * @returns {Promise} A promise that resolves when the function has completed.
+ */
const getAvailableTools = async (req, res) => {
try {
const userId = req.user?.id;
@@ -39,10 +63,18 @@ const getAvailableTools = async (req, res) => {
logger.warn('[getAvailableTools] User ID not found in request');
return res.status(401).json({ message: 'Unauthorized' });
}
+ const cache = getLogStores(CacheKeys.TOOL_CACHE);
+ const cachedToolsArray = await cache.get(CacheKeys.TOOLS);
- const appConfig =
- req.config ?? (await getAppConfig({ role: req.user?.role, tenantId: req.user?.tenantId }));
+ const appConfig = req.config ?? (await getAppConfig({ role: req.user?.role }));
+ // Return early if we have cached tools
+ if (cachedToolsArray != null) {
+ res.status(200).json(cachedToolsArray);
+ return;
+ }
+
+ /** @type {Record | null} Get tool definitions to filter which tools are actually available */
let toolDefinitions = await getCachedTools();
if (toolDefinitions == null && appConfig?.availableTools != null) {
@@ -51,17 +83,26 @@ const getAvailableTools = async (req, res) => {
toolDefinitions = appConfig.availableTools;
}
- const uniquePlugins = filterUniquePlugins(availableTools);
- const toolDefKeysList = toolDefinitions ? Object.keys(toolDefinitions) : null;
- const toolDefKeys = toolDefKeysList ? new Set(toolDefKeysList) : null;
+ /** @type {import('@librechat/api').LCManifestTool[]} */
+ let pluginManifest = availableTools;
+ /** @type {TPlugin[]} Deduplicate and authenticate plugins */
+ const uniquePlugins = filterUniquePlugins(pluginManifest);
+ const authenticatedPlugins = uniquePlugins.map((plugin) => {
+ if (checkPluginAuth(plugin)) {
+ return { ...plugin, authenticated: true };
+ } else {
+ return plugin;
+ }
+ });
+
+ /** Filter plugins based on availability */
const toolsOutput = [];
- for (const plugin of uniquePlugins) {
- const isToolDefined = toolDefKeys?.has(plugin.pluginKey) === true;
+ for (const plugin of authenticatedPlugins) {
+ const isToolDefined = toolDefinitions?.[plugin.pluginKey] !== undefined;
const isToolkit =
plugin.toolkit === true &&
- toolDefKeysList != null &&
- toolDefKeysList.some(
+ Object.keys(toolDefinitions ?? {}).some(
(key) => getToolkitKey({ toolkits, toolName: key }) === plugin.pluginKey,
);
@@ -69,10 +110,13 @@ const getAvailableTools = async (req, res) => {
continue;
}
- toolsOutput.push(checkPluginAuth(plugin) ? { ...plugin, authenticated: true } : plugin);
+ toolsOutput.push(plugin);
}
- res.status(200).json(toolsOutput);
+ const finalTools = filterUniquePlugins(toolsOutput);
+ await cache.set(CacheKeys.TOOLS, finalTools);
+
+ res.status(200).json(finalTools);
} catch (error) {
logger.error('[getAvailableTools]', error);
res.status(500).json({ message: error.message });
diff --git a/api/server/controllers/PluginController.spec.js b/api/server/controllers/PluginController.spec.js
index 9288680567..06a51a3bd6 100644
--- a/api/server/controllers/PluginController.spec.js
+++ b/api/server/controllers/PluginController.spec.js
@@ -1,4 +1,6 @@
+const { CacheKeys } = require('librechat-data-provider');
const { getCachedTools, getAppConfig } = require('~/server/services/Config');
+const { getLogStores } = require('~/cache');
jest.mock('@librechat/data-schemas', () => ({
logger: {
@@ -17,15 +19,22 @@ jest.mock('~/server/services/Config', () => ({
setCachedTools: jest.fn(),
}));
+// loadAndFormatTools mock removed - no longer used in PluginController
+// getMCPManager mock removed - no longer used in PluginController
+
jest.mock('~/app/clients/tools', () => ({
availableTools: [],
toolkits: [],
}));
+jest.mock('~/cache', () => ({
+ getLogStores: jest.fn(),
+}));
+
const { getAvailableTools, getAvailablePluginsController } = require('./PluginController');
describe('PluginController', () => {
- let mockReq, mockRes;
+ let mockReq, mockRes, mockCache;
beforeEach(() => {
jest.clearAllMocks();
@@ -37,12 +46,17 @@ describe('PluginController', () => {
},
};
mockRes = { status: jest.fn().mockReturnThis(), json: jest.fn() };
+ mockCache = { get: jest.fn(), set: jest.fn() };
+ getLogStores.mockReturnValue(mockCache);
+ // Clear availableTools and toolkits arrays before each test
require('~/app/clients/tools').availableTools.length = 0;
require('~/app/clients/tools').toolkits.length = 0;
+ // Reset getCachedTools mock to ensure clean state
getCachedTools.mockReset();
+ // Reset getAppConfig mock to ensure clean state with default values
getAppConfig.mockReset();
getAppConfig.mockResolvedValue({
filteredTools: [],
@@ -50,8 +64,31 @@ describe('PluginController', () => {
});
});
+ describe('cache namespace', () => {
+ it('getAvailablePluginsController should use TOOL_CACHE namespace', async () => {
+ mockCache.get.mockResolvedValue([]);
+ await getAvailablePluginsController(mockReq, mockRes);
+ expect(getLogStores).toHaveBeenCalledWith(CacheKeys.TOOL_CACHE);
+ });
+
+ it('getAvailableTools should use TOOL_CACHE namespace', async () => {
+ mockCache.get.mockResolvedValue([]);
+ await getAvailableTools(mockReq, mockRes);
+ expect(getLogStores).toHaveBeenCalledWith(CacheKeys.TOOL_CACHE);
+ });
+
+ it('should NOT use CONFIG_STORE namespace for tool/plugin operations', async () => {
+ mockCache.get.mockResolvedValue([]);
+ await getAvailablePluginsController(mockReq, mockRes);
+ await getAvailableTools(mockReq, mockRes);
+ const allCalls = getLogStores.mock.calls.flat();
+ expect(allCalls).not.toContain(CacheKeys.CONFIG_STORE);
+ });
+ });
+
describe('getAvailablePluginsController', () => {
it('should use filterUniquePlugins to remove duplicate plugins', async () => {
+ // Add plugins with duplicates to availableTools
const mockPlugins = [
{ name: 'Plugin1', pluginKey: 'key1', description: 'First' },
{ name: 'Plugin1', pluginKey: 'key1', description: 'First duplicate' },
@@ -60,6 +97,9 @@ describe('PluginController', () => {
require('~/app/clients/tools').availableTools.push(...mockPlugins);
+ mockCache.get.mockResolvedValue(null);
+
+ // Configure getAppConfig to return the expected config
getAppConfig.mockResolvedValueOnce({
filteredTools: [],
includedTools: [],
@@ -69,16 +109,21 @@ describe('PluginController', () => {
expect(mockRes.status).toHaveBeenCalledWith(200);
const responseData = mockRes.json.mock.calls[0][0];
+ // The real filterUniquePlugins should have removed the duplicate
expect(responseData).toHaveLength(2);
expect(responseData[0].pluginKey).toBe('key1');
expect(responseData[1].pluginKey).toBe('key2');
});
it('should use checkPluginAuth to verify plugin authentication', async () => {
+ // checkPluginAuth returns false for plugins without authConfig
+ // so authenticated property won't be added
const mockPlugin = { name: 'Plugin1', pluginKey: 'key1', description: 'First' };
require('~/app/clients/tools').availableTools.push(mockPlugin);
+ mockCache.get.mockResolvedValue(null);
+ // Configure getAppConfig to return the expected config
getAppConfig.mockResolvedValueOnce({
filteredTools: [],
includedTools: [],
@@ -87,9 +132,23 @@ describe('PluginController', () => {
await getAvailablePluginsController(mockReq, mockRes);
const responseData = mockRes.json.mock.calls[0][0];
+ // The real checkPluginAuth returns false for plugins without authConfig, so authenticated property is not added
expect(responseData[0].authenticated).toBeUndefined();
});
+ it('should return cached plugins when available', async () => {
+ const cachedPlugins = [
+ { name: 'CachedPlugin', pluginKey: 'cached', description: 'Cached plugin' },
+ ];
+
+ mockCache.get.mockResolvedValue(cachedPlugins);
+
+ await getAvailablePluginsController(mockReq, mockRes);
+
+ // When cache is hit, we return immediately without processing
+ expect(mockRes.json).toHaveBeenCalledWith(cachedPlugins);
+ });
+
it('should filter plugins based on includedTools', async () => {
const mockPlugins = [
{ name: 'Plugin1', pluginKey: 'key1', description: 'First' },
@@ -97,7 +156,9 @@ describe('PluginController', () => {
];
require('~/app/clients/tools').availableTools.push(...mockPlugins);
+ mockCache.get.mockResolvedValue(null);
+ // Configure getAppConfig to return config with includedTools
getAppConfig.mockResolvedValueOnce({
filteredTools: [],
includedTools: ['key1'],
@@ -109,47 +170,6 @@ describe('PluginController', () => {
expect(responseData).toHaveLength(1);
expect(responseData[0].pluginKey).toBe('key1');
});
-
- it('should exclude plugins in filteredTools', async () => {
- const mockPlugins = [
- { name: 'Plugin1', pluginKey: 'key1', description: 'First' },
- { name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
- ];
-
- require('~/app/clients/tools').availableTools.push(...mockPlugins);
-
- getAppConfig.mockResolvedValueOnce({
- filteredTools: ['key2'],
- includedTools: [],
- });
-
- await getAvailablePluginsController(mockReq, mockRes);
-
- const responseData = mockRes.json.mock.calls[0][0];
- expect(responseData).toHaveLength(1);
- expect(responseData[0].pluginKey).toBe('key1');
- });
-
- it('should ignore filteredTools when includedTools is set', async () => {
- const mockPlugins = [
- { name: 'Plugin1', pluginKey: 'key1', description: 'First' },
- { name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
- { name: 'Plugin3', pluginKey: 'key3', description: 'Third' },
- ];
-
- require('~/app/clients/tools').availableTools.push(...mockPlugins);
-
- getAppConfig.mockResolvedValueOnce({
- includedTools: ['key1', 'key2'],
- filteredTools: ['key2'],
- });
-
- await getAvailablePluginsController(mockReq, mockRes);
-
- const responseData = mockRes.json.mock.calls[0][0];
- expect(responseData).toHaveLength(2);
- expect(responseData.map((p) => p.pluginKey)).toEqual(['key1', 'key2']);
- });
});
describe('getAvailableTools', () => {
@@ -165,11 +185,12 @@ describe('PluginController', () => {
},
};
- require('~/app/clients/tools').availableTools.push(
+ const mockCachedPlugins = [
{ name: 'user-tool', pluginKey: 'user-tool', description: 'Duplicate user tool' },
{ name: 'ManifestTool', pluginKey: 'manifest-tool', description: 'Manifest tool' },
- );
+ ];
+ mockCache.get.mockResolvedValue(mockCachedPlugins);
getCachedTools.mockResolvedValueOnce(mockUserTools);
mockReq.config = {
mcpConfig: null,
@@ -181,19 +202,24 @@ describe('PluginController', () => {
expect(mockRes.status).toHaveBeenCalledWith(200);
const responseData = mockRes.json.mock.calls[0][0];
expect(Array.isArray(responseData)).toBe(true);
+ // The real filterUniquePlugins should have deduplicated tools with same pluginKey
const userToolCount = responseData.filter((tool) => tool.pluginKey === 'user-tool').length;
expect(userToolCount).toBe(1);
});
it('should use checkPluginAuth to verify authentication status', async () => {
+ // Add a plugin to availableTools that will be checked
const mockPlugin = {
name: 'Tool1',
pluginKey: 'tool1',
description: 'Tool 1',
+ // No authConfig means checkPluginAuth returns false
};
require('~/app/clients/tools').availableTools.push(mockPlugin);
+ mockCache.get.mockResolvedValue(null);
+ // getCachedTools returns the tool definitions
getCachedTools.mockResolvedValueOnce({
tool1: {
type: 'function',
@@ -216,6 +242,7 @@ describe('PluginController', () => {
expect(Array.isArray(responseData)).toBe(true);
const tool = responseData.find((t) => t.pluginKey === 'tool1');
expect(tool).toBeDefined();
+ // The real checkPluginAuth returns false for plugins without authConfig, so authenticated property is not added
expect(tool.authenticated).toBeUndefined();
});
@@ -229,12 +256,15 @@ describe('PluginController', () => {
require('~/app/clients/tools').availableTools.push(mockToolkit);
+ // Mock toolkits to have a mapping
require('~/app/clients/tools').toolkits.push({
name: 'Toolkit1',
pluginKey: 'toolkit1',
tools: ['toolkit1_function'],
});
+ mockCache.get.mockResolvedValue(null);
+ // getCachedTools returns the tool definitions
getCachedTools.mockResolvedValueOnce({
toolkit1_function: {
type: 'function',
@@ -262,7 +292,7 @@ describe('PluginController', () => {
describe('helper function integration', () => {
it('should handle error cases gracefully', async () => {
- getCachedTools.mockRejectedValue(new Error('Cache error'));
+ mockCache.get.mockRejectedValue(new Error('Cache error'));
await getAvailableTools(mockReq, mockRes);
@@ -272,7 +302,17 @@ describe('PluginController', () => {
});
describe('edge cases with undefined/null values', () => {
- it('should handle null cachedTools', async () => {
+ it('should handle undefined cache gracefully', async () => {
+ getLogStores.mockReturnValue(undefined);
+
+ await getAvailableTools(mockReq, mockRes);
+
+ expect(mockRes.status).toHaveBeenCalledWith(500);
+ });
+
+ it('should handle null cachedTools and cachedUserTools', async () => {
+ mockCache.get.mockResolvedValue(null);
+ // getCachedTools returns empty object instead of null
getCachedTools.mockResolvedValueOnce({});
mockReq.config = {
mcpConfig: null,
@@ -281,40 +321,51 @@ describe('PluginController', () => {
await getAvailableTools(mockReq, mockRes);
+ // Should handle null values gracefully
expect(mockRes.status).toHaveBeenCalledWith(200);
expect(mockRes.json).toHaveBeenCalledWith([]);
});
it('should handle when getCachedTools returns undefined', async () => {
+ mockCache.get.mockResolvedValue(null);
mockReq.config = {
mcpConfig: null,
paths: { structuredTools: '/mock/path' },
};
+ // Mock getCachedTools to return undefined
getCachedTools.mockReset();
getCachedTools.mockResolvedValueOnce(undefined);
await getAvailableTools(mockReq, mockRes);
+ // Should handle undefined values gracefully
expect(mockRes.status).toHaveBeenCalledWith(200);
expect(mockRes.json).toHaveBeenCalledWith([]);
});
it('should handle empty toolDefinitions object', async () => {
+ mockCache.get.mockResolvedValue(null);
+ // Reset getCachedTools to ensure clean state
getCachedTools.mockReset();
getCachedTools.mockResolvedValue({});
- mockReq.config = {};
+ mockReq.config = {}; // No mcpConfig at all
+ // Ensure no plugins are available
require('~/app/clients/tools').availableTools.length = 0;
await getAvailableTools(mockReq, mockRes);
+ // With empty tool definitions, no tools should be in the final output
expect(mockRes.json).toHaveBeenCalledWith([]);
});
it('should handle undefined filteredTools and includedTools', async () => {
mockReq.config = {};
+ mockCache.get.mockResolvedValue(null);
+ // Configure getAppConfig to return config with undefined properties
+ // The controller will use default values [] for filteredTools and includedTools
getAppConfig.mockResolvedValueOnce({});
await getAvailablePluginsController(mockReq, mockRes);
@@ -331,8 +382,13 @@ describe('PluginController', () => {
toolkit: true,
};
+ // No need to mock app.locals anymore as it's not used
+
+ // Add the toolkit to availableTools
require('~/app/clients/tools').availableTools.push(mockToolkit);
+ mockCache.get.mockResolvedValue(null);
+ // getCachedTools returns empty object to avoid null reference error
getCachedTools.mockResolvedValueOnce({});
mockReq.config = {
mcpConfig: null,
@@ -341,32 +397,43 @@ describe('PluginController', () => {
await getAvailableTools(mockReq, mockRes);
+ // Should handle null toolDefinitions gracefully
expect(mockRes.status).toHaveBeenCalledWith(200);
});
- it('should handle undefined toolDefinitions when checking isToolDefined', async () => {
+ it('should handle undefined toolDefinitions when checking isToolDefined (traversaal_search bug)', async () => {
+ // This test reproduces the bug where toolDefinitions is undefined
+ // and accessing toolDefinitions[plugin.pluginKey] causes a TypeError
const mockPlugin = {
name: 'Traversaal Search',
pluginKey: 'traversaal_search',
description: 'Search plugin',
};
+ // Add the plugin to availableTools
require('~/app/clients/tools').availableTools.push(mockPlugin);
+ mockCache.get.mockResolvedValue(null);
+
mockReq.config = {
mcpConfig: null,
paths: { structuredTools: '/mock/path' },
};
+ // CRITICAL: getCachedTools returns undefined
+ // This is what causes the bug when trying to access toolDefinitions[plugin.pluginKey]
getCachedTools.mockResolvedValueOnce(undefined);
+ // This should not throw an error with the optional chaining fix
await getAvailableTools(mockReq, mockRes);
+ // Should handle undefined toolDefinitions gracefully and return empty array
expect(mockRes.status).toHaveBeenCalledWith(200);
expect(mockRes.json).toHaveBeenCalledWith([]);
});
it('should re-initialize tools from appConfig when cache returns null', async () => {
+ // Setup: Initial state with tools in appConfig
const mockAppTools = {
tool1: {
type: 'function',
@@ -386,12 +453,15 @@ describe('PluginController', () => {
},
};
+ // Add matching plugins to availableTools
require('~/app/clients/tools').availableTools.push(
{ name: 'Tool 1', pluginKey: 'tool1', description: 'Tool 1' },
{ name: 'Tool 2', pluginKey: 'tool2', description: 'Tool 2' },
);
- getCachedTools.mockResolvedValueOnce(null);
+ // Simulate cache cleared state (returns null)
+ mockCache.get.mockResolvedValue(null);
+ getCachedTools.mockResolvedValueOnce(null); // Global tools (cache cleared)
mockReq.config = {
filteredTools: [],
@@ -399,12 +469,15 @@ describe('PluginController', () => {
availableTools: mockAppTools,
};
+ // Mock setCachedTools to verify it's called to re-initialize
const { setCachedTools } = require('~/server/services/Config');
await getAvailableTools(mockReq, mockRes);
+ // Should have re-initialized the cache with tools from appConfig
expect(setCachedTools).toHaveBeenCalledWith(mockAppTools);
+ // Should still return tools successfully
expect(mockRes.status).toHaveBeenCalledWith(200);
const responseData = mockRes.json.mock.calls[0][0];
expect(responseData).toHaveLength(2);
@@ -413,22 +486,29 @@ describe('PluginController', () => {
});
it('should handle cache clear without appConfig.availableTools gracefully', async () => {
+ // Setup: appConfig without availableTools
getAppConfig.mockResolvedValue({
filteredTools: [],
includedTools: [],
+ // No availableTools property
});
+ // Clear availableTools array
require('~/app/clients/tools').availableTools.length = 0;
- getCachedTools.mockResolvedValueOnce(null);
+ // Cache returns null (cleared state)
+ mockCache.get.mockResolvedValue(null);
+ getCachedTools.mockResolvedValueOnce(null); // Global tools (cache cleared)
mockReq.config = {
filteredTools: [],
includedTools: [],
+ // No availableTools
};
await getAvailableTools(mockReq, mockRes);
+ // Should handle gracefully without crashing
expect(mockRes.status).toHaveBeenCalledWith(200);
expect(mockRes.json).toHaveBeenCalledWith([]);
});
diff --git a/api/server/controllers/TwoFactorController.js b/api/server/controllers/TwoFactorController.js
index 18a0ee3f5a..fde5965261 100644
--- a/api/server/controllers/TwoFactorController.js
+++ b/api/server/controllers/TwoFactorController.js
@@ -1,6 +1,5 @@
const { encryptV3, logger } = require('@librechat/data-schemas');
const {
- verifyOTPOrBackupCode,
generateBackupCodes,
generateTOTPSecret,
verifyBackupCode,
@@ -14,42 +13,24 @@ const safeAppTitle = (process.env.APP_TITLE || 'LibreChat').replace(/\s+/g, '');
/**
* Enable 2FA for the user by generating a new TOTP secret and backup codes.
* The secret is encrypted and stored, and 2FA is marked as disabled until confirmed.
- * If 2FA is already enabled, requires OTP or backup code verification to re-enroll.
*/
const enable2FA = async (req, res) => {
try {
const userId = req.user.id;
- const existingUser = await getUserById(
- userId,
- '+totpSecret +backupCodes _id twoFactorEnabled email',
- );
-
- if (existingUser && existingUser.twoFactorEnabled) {
- const { token, backupCode } = req.body;
- const result = await verifyOTPOrBackupCode({
- user: existingUser,
- token,
- backupCode,
- persistBackupUse: false,
- });
-
- if (!result.verified) {
- const msg = result.message ?? 'TOTP token or backup code is required to re-enroll 2FA';
- return res.status(result.status ?? 400).json({ message: msg });
- }
- }
-
const secret = generateTOTPSecret();
const { plainCodes, codeObjects } = await generateBackupCodes();
+
+ // Encrypt the secret with v3 encryption before saving.
const encryptedSecret = encryptV3(secret);
+ // Update the user record: store the secret & backup codes and set twoFactorEnabled to false.
const user = await updateUser(userId, {
- pendingTotpSecret: encryptedSecret,
- pendingBackupCodes: codeObjects,
+ totpSecret: encryptedSecret,
+ backupCodes: codeObjects,
+ twoFactorEnabled: false,
});
- const email = user.email || (existingUser && existingUser.email) || '';
- const otpauthUrl = `otpauth://totp/${safeAppTitle}:${email}?secret=${secret}&issuer=${safeAppTitle}`;
+ const otpauthUrl = `otpauth://totp/${safeAppTitle}:${user.email}?secret=${secret}&issuer=${safeAppTitle}`;
return res.status(200).json({ otpauthUrl, backupCodes: plainCodes });
} catch (err) {
@@ -65,14 +46,13 @@ const verify2FA = async (req, res) => {
try {
const userId = req.user.id;
const { token, backupCode } = req.body;
- const user = await getUserById(userId, '+totpSecret +pendingTotpSecret +backupCodes _id');
- const secretSource = user?.pendingTotpSecret ?? user?.totpSecret;
+ const user = await getUserById(userId, '_id totpSecret backupCodes');
- if (!user || !secretSource) {
+ if (!user || !user.totpSecret) {
return res.status(400).json({ message: '2FA not initiated' });
}
- const secret = await getTOTPSecret(secretSource);
+ const secret = await getTOTPSecret(user.totpSecret);
let isVerified = false;
if (token) {
@@ -98,28 +78,15 @@ const confirm2FA = async (req, res) => {
try {
const userId = req.user.id;
const { token } = req.body;
- const user = await getUserById(
- userId,
- '+totpSecret +pendingTotpSecret +pendingBackupCodes _id',
- );
- const secretSource = user?.pendingTotpSecret ?? user?.totpSecret;
+ const user = await getUserById(userId, '_id totpSecret');
- if (!user || !secretSource) {
+ if (!user || !user.totpSecret) {
return res.status(400).json({ message: '2FA not initiated' });
}
- const secret = await getTOTPSecret(secretSource);
+ const secret = await getTOTPSecret(user.totpSecret);
if (await verifyTOTP(secret, token)) {
- const update = {
- totpSecret: user.pendingTotpSecret ?? user.totpSecret,
- twoFactorEnabled: true,
- pendingTotpSecret: null,
- pendingBackupCodes: [],
- };
- if (user.pendingBackupCodes?.length) {
- update.backupCodes = user.pendingBackupCodes;
- }
- await updateUser(userId, update);
+ await updateUser(userId, { twoFactorEnabled: true });
return res.status(200).json();
}
return res.status(400).json({ message: 'Invalid token.' });
@@ -137,27 +104,31 @@ const disable2FA = async (req, res) => {
try {
const userId = req.user.id;
const { token, backupCode } = req.body;
- const user = await getUserById(userId, '+totpSecret +backupCodes _id twoFactorEnabled');
+ const user = await getUserById(userId, '_id totpSecret backupCodes');
if (!user || !user.totpSecret) {
return res.status(400).json({ message: '2FA is not setup for this user' });
}
if (user.twoFactorEnabled) {
- const result = await verifyOTPOrBackupCode({ user, token, backupCode });
+ const secret = await getTOTPSecret(user.totpSecret);
+ let isVerified = false;
- if (!result.verified) {
- const msg = result.message ?? 'Either token or backup code is required to disable 2FA';
- return res.status(result.status ?? 400).json({ message: msg });
+ if (token) {
+ isVerified = await verifyTOTP(secret, token);
+ } else if (backupCode) {
+ isVerified = await verifyBackupCode({ user, backupCode });
+ } else {
+ return res
+ .status(400)
+ .json({ message: 'Either token or backup code is required to disable 2FA' });
+ }
+
+ if (!isVerified) {
+ return res.status(401).json({ message: 'Invalid token or backup code' });
}
}
- await updateUser(userId, {
- totpSecret: null,
- backupCodes: [],
- twoFactorEnabled: false,
- pendingTotpSecret: null,
- pendingBackupCodes: [],
- });
+ await updateUser(userId, { totpSecret: null, backupCodes: [], twoFactorEnabled: false });
return res.status(200).json();
} catch (err) {
logger.error('[disable2FA]', err);
@@ -167,28 +138,10 @@ const disable2FA = async (req, res) => {
/**
* Regenerate backup codes for the user.
- * Requires OTP or backup code verification if 2FA is already enabled.
*/
const regenerateBackupCodes = async (req, res) => {
try {
const userId = req.user.id;
- const user = await getUserById(userId, '+totpSecret +backupCodes _id twoFactorEnabled');
-
- if (!user) {
- return res.status(404).json({ message: 'User not found' });
- }
-
- if (user.twoFactorEnabled) {
- const { token, backupCode } = req.body;
- const result = await verifyOTPOrBackupCode({ user, token, backupCode });
-
- if (!result.verified) {
- const msg =
- result.message ?? 'TOTP token or backup code is required to regenerate backup codes';
- return res.status(result.status ?? 400).json({ message: msg });
- }
- }
-
const { plainCodes, codeObjects } = await generateBackupCodes();
await updateUser(userId, { backupCodes: codeObjects });
return res.status(200).json({
diff --git a/api/server/controllers/UserController.js b/api/server/controllers/UserController.js
index 16b68968d9..7a9dd8125e 100644
--- a/api/server/controllers/UserController.js
+++ b/api/server/controllers/UserController.js
@@ -1,32 +1,52 @@
-const mongoose = require('mongoose');
const { logger, webSearchKeys } = require('@librechat/data-schemas');
+const { Tools, CacheKeys, Constants, FileSources } = require('librechat-data-provider');
const {
- getNewS3URL,
- needsRefresh,
MCPOAuthHandler,
MCPTokenStorage,
normalizeHttpError,
extractWebSearchEnvVars,
} = require('@librechat/api');
const {
- Tools,
- CacheKeys,
- Constants,
- FileSources,
- ResourceType,
-} = require('librechat-data-provider');
+ deleteAllUserSessions,
+ deleteAllSharedLinks,
+ updateUserPlugins,
+ deleteUserById,
+ deleteMessages,
+ deletePresets,
+ deleteUserKey,
+ deleteConvos,
+ deleteFiles,
+ updateUser,
+ findToken,
+ getFiles,
+} = require('~/models');
+const {
+ ConversationTag,
+ AgentApiKey,
+ Transaction,
+ MemoryEntry,
+ Assistant,
+ AclEntry,
+ Balance,
+ Action,
+ Group,
+ Token,
+ User,
+} = require('~/db/models');
const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/services/PluginService');
-const { verifyOTPOrBackupCode } = require('~/server/services/twoFactorService');
const { verifyEmail, resendVerificationEmail } = require('~/server/services/AuthService');
const { getMCPManager, getFlowStateManager, getMCPServersRegistry } = require('~/config');
const { invalidateCachedTools } = require('~/server/services/Config/getCachedTools');
+const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud');
const { processDeleteRequest } = require('~/server/services/Files/process');
const { getAppConfig } = require('~/server/services/Config');
+const { deleteToolCalls } = require('~/models/ToolCall');
+const { deleteUserPrompts } = require('~/models/Prompt');
+const { deleteUserAgents } = require('~/models/Agent');
const { getLogStores } = require('~/cache');
-const db = require('~/models');
const getUserController = async (req, res) => {
- const appConfig = await getAppConfig({ role: req.user?.role, tenantId: req.user?.tenantId });
+ const appConfig = await getAppConfig({ role: req.user?.role });
/** @type {IUser} */
const userData = req.user.toObject != null ? req.user.toObject() : { ...req.user };
/**
@@ -44,7 +64,7 @@ const getUserController = async (req, res) => {
const originalAvatar = userData.avatar;
try {
userData.avatar = await getNewS3URL(userData.avatar);
- await db.updateUser(userData.id, { avatar: userData.avatar });
+ await updateUser(userData.id, { avatar: userData.avatar });
} catch (error) {
userData.avatar = originalAvatar;
logger.error('Error getting new S3 URL for avatar:', error);
@@ -55,7 +75,7 @@ const getUserController = async (req, res) => {
const getTermsStatusController = async (req, res) => {
try {
- const user = await db.getUserById(req.user.id, 'termsAccepted');
+ const user = await User.findById(req.user.id);
if (!user) {
return res.status(404).json({ message: 'User not found' });
}
@@ -68,7 +88,7 @@ const getTermsStatusController = async (req, res) => {
const acceptTermsController = async (req, res) => {
try {
- const user = await db.updateUser(req.user.id, { termsAccepted: true });
+ const user = await User.findByIdAndUpdate(req.user.id, { termsAccepted: true }, { new: true });
if (!user) {
return res.status(404).json({ message: 'User not found' });
}
@@ -81,7 +101,7 @@ const acceptTermsController = async (req, res) => {
const deleteUserFiles = async (req) => {
try {
- const userFiles = await db.getFiles({ user: req.user.id });
+ const userFiles = await getFiles({ user: req.user.id });
await processDeleteRequest({
req,
files: userFiles,
@@ -91,86 +111,13 @@ const deleteUserFiles = async (req) => {
}
};
-/**
- * Deletes MCP servers solely owned by the user and cleans up their ACLs.
- * Disconnects live sessions for deleted servers before removing DB records.
- * Servers with other owners are left intact; the caller is responsible for
- * removing the user's own ACL principal entries separately.
- *
- * Also handles legacy (pre-ACL) MCP servers that only have the author field set,
- * ensuring they are not orphaned if no permission migration has been run.
- * @param {string} userId - The ID of the user.
- */
-const deleteUserMcpServers = async (userId) => {
- try {
- const MCPServer = mongoose.models.MCPServer;
- const AclEntry = mongoose.models.AclEntry;
- if (!MCPServer) {
- return;
- }
-
- const userObjectId = new mongoose.Types.ObjectId(userId);
- const soleOwnedIds = await db.getSoleOwnedResourceIds(userObjectId, ResourceType.MCPSERVER);
-
- const authoredServers = await MCPServer.find({ author: userObjectId })
- .select('_id serverName')
- .lean();
-
- const migratedEntries =
- authoredServers.length > 0
- ? await AclEntry.find({
- resourceType: ResourceType.MCPSERVER,
- resourceId: { $in: authoredServers.map((s) => s._id) },
- })
- .select('resourceId')
- .lean()
- : [];
- const migratedIds = new Set(migratedEntries.map((e) => e.resourceId.toString()));
- const legacyServers = authoredServers.filter((s) => !migratedIds.has(s._id.toString()));
- const legacyServerIds = legacyServers.map((s) => s._id);
-
- const allServerIdsToDelete = [...soleOwnedIds, ...legacyServerIds];
-
- if (allServerIdsToDelete.length === 0) {
- return;
- }
-
- const aclOwnedServers =
- soleOwnedIds.length > 0
- ? await MCPServer.find({ _id: { $in: soleOwnedIds } })
- .select('serverName')
- .lean()
- : [];
- const allServersToDelete = [...aclOwnedServers, ...legacyServers];
-
- const mcpManager = getMCPManager();
- if (mcpManager) {
- await Promise.all(
- allServersToDelete.map(async (s) => {
- await mcpManager.disconnectUserConnection(userId, s.serverName);
- await invalidateCachedTools({ userId, serverName: s.serverName });
- }),
- );
- }
-
- await AclEntry.deleteMany({
- resourceType: ResourceType.MCPSERVER,
- resourceId: { $in: allServerIdsToDelete },
- });
-
- await MCPServer.deleteMany({ _id: { $in: allServerIdsToDelete } });
- } catch (error) {
- logger.error('[deleteUserMcpServers] General error:', error);
- }
-};
-
const updateUserPluginsController = async (req, res) => {
- const appConfig = await getAppConfig({ role: req.user?.role, tenantId: req.user?.tenantId });
+ const appConfig = await getAppConfig({ role: req.user?.role });
const { user } = req;
const { pluginKey, action, auth, isEntityTool } = req.body;
try {
if (!isEntityTool) {
- await db.updateUserPlugins(user._id, user.plugins, pluginKey, action);
+ await updateUserPlugins(user._id, user.plugins, pluginKey, action);
}
if (auth == null) {
@@ -294,50 +241,37 @@ const deleteUserController = async (req, res) => {
const { user } = req;
try {
- const existingUser = await db.getUserById(
- user.id,
- '+totpSecret +backupCodes _id twoFactorEnabled',
- );
- if (existingUser && existingUser.twoFactorEnabled) {
- const { token, backupCode } = req.body;
- const result = await verifyOTPOrBackupCode({ user: existingUser, token, backupCode });
-
- if (!result.verified) {
- const msg =
- result.message ??
- 'TOTP token or backup code is required to delete account with 2FA enabled';
- return res.status(result.status ?? 400).json({ message: msg });
- }
- }
-
- await db.deleteMessages({ user: user.id });
- await db.deleteAllUserSessions({ userId: user.id });
- await db.deleteTransactions({ user: user.id });
- await db.deleteUserKey({ userId: user.id, all: true });
- await db.deleteBalances({ user: user._id });
- await db.deletePresets(user.id);
+ await deleteMessages({ user: user.id }); // delete user messages
+ await deleteAllUserSessions({ userId: user.id }); // delete user sessions
+ await Transaction.deleteMany({ user: user.id }); // delete user transactions
+ await deleteUserKey({ userId: user.id, all: true }); // delete user keys
+ await Balance.deleteMany({ user: user._id }); // delete user balances
+ await deletePresets(user.id); // delete user presets
try {
- await db.deleteConvos(user.id);
+ await deleteConvos(user.id); // delete user convos
} catch (error) {
logger.error('[deleteUserController] Error deleting user convos, likely no convos', error);
}
- await deleteUserPluginAuth(user.id, null, true);
- await db.deleteUserById(user.id);
- await db.deleteAllSharedLinks(user.id);
- await deleteUserFiles(req);
- await db.deleteFiles(null, user.id);
- await db.deleteToolCalls(user.id);
- await db.deleteUserAgents(user.id);
- await db.deleteAllAgentApiKeys(user._id);
- await db.deleteAssistants({ user: user.id });
- await db.deleteConversationTags({ user: user.id });
- await db.deleteAllUserMemories(user.id);
- await db.deleteUserPrompts(user.id);
- await deleteUserMcpServers(user.id);
- await db.deleteActions({ user: user.id });
- await db.deleteTokens({ userId: user.id });
- await db.removeUserFromAllGroups(user.id);
- await db.deleteAclEntries({ principalId: user._id });
+ await deleteUserPluginAuth(user.id, null, true); // delete user plugin auth
+ await deleteUserById(user.id); // delete user
+ await deleteAllSharedLinks(user.id); // delete user shared links
+ await deleteUserFiles(req); // delete user files
+ await deleteFiles(null, user.id); // delete database files in case of orphaned files from previous steps
+ await deleteToolCalls(user.id); // delete user tool calls
+ await deleteUserAgents(user.id); // delete user agents
+ await AgentApiKey.deleteMany({ user: user._id }); // delete user agent API keys
+ await Assistant.deleteMany({ user: user.id }); // delete user assistants
+ await ConversationTag.deleteMany({ user: user.id }); // delete user conversation tags
+ await MemoryEntry.deleteMany({ userId: user.id }); // delete user memory entries
+ await deleteUserPrompts(req, user.id); // delete user prompts
+ await Action.deleteMany({ user: user.id }); // delete user actions
+ await Token.deleteMany({ userId: user.id }); // delete user OAuth tokens
+ await Group.updateMany(
+ // remove user from all groups
+ { memberIds: user.id },
+ { $pull: { memberIds: user.id } },
+ );
+ await AclEntry.deleteMany({ principalId: user._id }); // delete user ACL entries
logger.info(`User deleted account. Email: ${user.email} ID: ${user.id}`);
res.status(200).send({ message: 'User deleted' });
} catch (err) {
@@ -397,7 +331,7 @@ const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {
const clientTokenData = await MCPTokenStorage.getClientInfoAndMetadata({
userId,
serverName,
- findToken: db.findToken,
+ findToken,
});
if (clientTokenData == null) {
return;
@@ -408,7 +342,7 @@ const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {
const tokens = await MCPTokenStorage.getTokens({
userId,
serverName,
- findToken: db.findToken,
+ findToken,
});
// 3. revoke OAuth tokens at the provider
@@ -418,7 +352,6 @@ const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {
serverConfig.oauth?.revocation_endpoint_auth_methods_supported ??
clientMetadata.revocation_endpoint_auth_methods_supported;
const oauthHeaders = serverConfig.oauth_headers ?? {};
- const allowedDomains = getMCPServersRegistry().getAllowedDomains();
if (tokens?.access_token) {
try {
@@ -434,7 +367,6 @@ const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {
revocationEndpointAuthMethodsSupported,
},
oauthHeaders,
- allowedDomains,
);
} catch (error) {
logger.error(`Error revoking OAuth access token for ${serverName}:`, error);
@@ -455,7 +387,6 @@ const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {
revocationEndpointAuthMethodsSupported,
},
oauthHeaders,
- allowedDomains,
);
} catch (error) {
logger.error(`Error revoking OAuth refresh token for ${serverName}:`, error);
@@ -467,7 +398,7 @@ const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {
userId,
serverName,
deleteToken: async (filter) => {
- await db.deleteTokens(filter);
+ await Token.deleteOne(filter);
},
});
@@ -487,5 +418,4 @@ module.exports = {
verifyEmailController,
updateUserPluginsController,
resendVerificationController,
- deleteUserMcpServers,
};
diff --git a/api/server/controllers/UserController.spec.js b/api/server/controllers/UserController.spec.js
deleted file mode 100644
index 4a96072062..0000000000
--- a/api/server/controllers/UserController.spec.js
+++ /dev/null
@@ -1,225 +0,0 @@
-const mongoose = require('mongoose');
-const { MongoMemoryServer } = require('mongodb-memory-server');
-
-jest.mock('@librechat/data-schemas', () => {
- const actual = jest.requireActual('@librechat/data-schemas');
- return {
- ...actual,
- logger: {
- debug: jest.fn(),
- error: jest.fn(),
- warn: jest.fn(),
- info: jest.fn(),
- },
- };
-});
-
-jest.mock('~/models', () => {
- const _mongoose = require('mongoose');
- return {
- deleteAllUserSessions: jest.fn().mockResolvedValue(undefined),
- deleteAllSharedLinks: jest.fn().mockResolvedValue(undefined),
- deleteAllAgentApiKeys: jest.fn().mockResolvedValue(undefined),
- deleteConversationTags: jest.fn().mockResolvedValue(undefined),
- deleteAllUserMemories: jest.fn().mockResolvedValue(undefined),
- deleteTransactions: jest.fn().mockResolvedValue(undefined),
- deleteAclEntries: jest.fn().mockResolvedValue(undefined),
- updateUserPlugins: jest.fn(),
- deleteAssistants: jest.fn().mockResolvedValue(undefined),
- deleteUserById: jest.fn().mockResolvedValue(undefined),
- deleteUserPrompts: jest.fn().mockResolvedValue(undefined),
- deleteMessages: jest.fn().mockResolvedValue(undefined),
- deleteBalances: jest.fn().mockResolvedValue(undefined),
- deleteActions: jest.fn().mockResolvedValue(undefined),
- deletePresets: jest.fn().mockResolvedValue(undefined),
- deleteUserKey: jest.fn().mockResolvedValue(undefined),
- deleteToolCalls: jest.fn().mockResolvedValue(undefined),
- deleteUserAgents: jest.fn().mockResolvedValue(undefined),
- deleteTokens: jest.fn().mockResolvedValue(undefined),
- deleteConvos: jest.fn().mockResolvedValue(undefined),
- deleteFiles: jest.fn().mockResolvedValue(undefined),
- updateUser: jest.fn(),
- getUserById: jest.fn().mockResolvedValue(null),
- findToken: jest.fn(),
- getFiles: jest.fn().mockResolvedValue([]),
- removeUserFromAllGroups: jest.fn().mockImplementation(async (userId) => {
- const Group = _mongoose.models.Group;
- await Group.updateMany({ memberIds: userId }, { $pullAll: { memberIds: [userId] } });
- }),
- };
-});
-
-jest.mock('~/server/services/PluginService', () => ({
- updateUserPluginAuth: jest.fn(),
- deleteUserPluginAuth: jest.fn().mockResolvedValue(undefined),
-}));
-
-jest.mock('~/server/services/AuthService', () => ({
- verifyEmail: jest.fn(),
- resendVerificationEmail: jest.fn(),
-}));
-
-jest.mock('sharp', () =>
- jest.fn(() => ({
- metadata: jest.fn().mockResolvedValue({}),
- toFormat: jest.fn().mockReturnThis(),
- toBuffer: jest.fn().mockResolvedValue(Buffer.alloc(0)),
- })),
-);
-
-jest.mock('@librechat/api', () => ({
- ...jest.requireActual('@librechat/api'),
- needsRefresh: jest.fn(),
- getNewS3URL: jest.fn(),
-}));
-
-jest.mock('~/server/services/Files/process', () => ({
- processDeleteRequest: jest.fn().mockResolvedValue(undefined),
-}));
-
-jest.mock('~/server/services/Config', () => ({
- getAppConfig: jest.fn().mockResolvedValue({}),
- getMCPManager: jest.fn(),
- getFlowStateManager: jest.fn(),
- getMCPServersRegistry: jest.fn(),
-}));
-
-jest.mock('~/cache', () => ({
- getLogStores: jest.fn(),
-}));
-
-let mongoServer;
-
-beforeAll(async () => {
- mongoServer = await MongoMemoryServer.create();
- await mongoose.connect(mongoServer.getUri());
-});
-
-afterAll(async () => {
- await mongoose.disconnect();
- await mongoServer.stop();
-});
-
-afterEach(async () => {
- const collections = mongoose.connection.collections;
- for (const key in collections) {
- await collections[key].deleteMany({});
- }
-});
-
-const { deleteUserController } = require('./UserController');
-const { Group } = require('~/db/models');
-const { deleteConvos } = require('~/models');
-
-describe('deleteUserController', () => {
- const mockRes = {
- status: jest.fn().mockReturnThis(),
- send: jest.fn().mockReturnThis(),
- json: jest.fn().mockReturnThis(),
- };
-
- beforeEach(() => {
- jest.clearAllMocks();
- });
-
- it('should return 200 on successful deletion', async () => {
- const userId = new mongoose.Types.ObjectId();
- const req = { user: { id: userId.toString(), _id: userId, email: 'test@test.com' } };
-
- await deleteUserController(req, mockRes);
-
- expect(mockRes.status).toHaveBeenCalledWith(200);
- expect(mockRes.send).toHaveBeenCalledWith({ message: 'User deleted' });
- });
-
- it('should remove the user from all groups via $pullAll', async () => {
- const userId = new mongoose.Types.ObjectId();
- const userIdStr = userId.toString();
- const otherUser = new mongoose.Types.ObjectId().toString();
-
- await Group.create([
- { name: 'Group A', memberIds: [userIdStr, otherUser], source: 'local' },
- { name: 'Group B', memberIds: [userIdStr], source: 'local' },
- { name: 'Group C', memberIds: [otherUser], source: 'local' },
- ]);
-
- const req = { user: { id: userIdStr, _id: userId, email: 'del@test.com' } };
- await deleteUserController(req, mockRes);
-
- const groups = await Group.find({}).sort({ name: 1 }).lean();
- expect(groups[0].memberIds).toEqual([otherUser]);
- expect(groups[1].memberIds).toEqual([]);
- expect(groups[2].memberIds).toEqual([otherUser]);
- });
-
- it('should handle user that exists in no groups', async () => {
- const userId = new mongoose.Types.ObjectId();
- await Group.create({ name: 'Empty', memberIds: ['someone-else'], source: 'local' });
-
- const req = { user: { id: userId.toString(), _id: userId, email: 'no-groups@test.com' } };
- await deleteUserController(req, mockRes);
-
- expect(mockRes.status).toHaveBeenCalledWith(200);
- const group = await Group.findOne({ name: 'Empty' }).lean();
- expect(group.memberIds).toEqual(['someone-else']);
- });
-
- it('should remove duplicate memberIds if the user appears more than once', async () => {
- const userId = new mongoose.Types.ObjectId();
- const userIdStr = userId.toString();
-
- await Group.create({
- name: 'Dupes',
- memberIds: [userIdStr, 'other', userIdStr],
- source: 'local',
- });
-
- const req = { user: { id: userIdStr, _id: userId, email: 'dupe@test.com' } };
- await deleteUserController(req, mockRes);
-
- const group = await Group.findOne({ name: 'Dupes' }).lean();
- expect(group.memberIds).toEqual(['other']);
- });
-
- it('should still succeed when deleteConvos throws', async () => {
- const userId = new mongoose.Types.ObjectId();
- deleteConvos.mockRejectedValueOnce(new Error('no convos'));
-
- const req = { user: { id: userId.toString(), _id: userId, email: 'convos@test.com' } };
- await deleteUserController(req, mockRes);
-
- expect(mockRes.status).toHaveBeenCalledWith(200);
- expect(mockRes.send).toHaveBeenCalledWith({ message: 'User deleted' });
- });
-
- it('should return 500 when a critical operation fails', async () => {
- const userId = new mongoose.Types.ObjectId();
- const { deleteMessages } = require('~/models');
- deleteMessages.mockRejectedValueOnce(new Error('db down'));
-
- const req = { user: { id: userId.toString(), _id: userId, email: 'fail@test.com' } };
- await deleteUserController(req, mockRes);
-
- expect(mockRes.status).toHaveBeenCalledWith(500);
- expect(mockRes.json).toHaveBeenCalledWith({ message: 'Something went wrong.' });
- });
-
- it('should use string user.id (not ObjectId user._id) for memberIds removal', async () => {
- const userId = new mongoose.Types.ObjectId();
- const userIdStr = userId.toString();
- const otherUser = 'other-user-id';
-
- await Group.create({
- name: 'StringCheck',
- memberIds: [userIdStr, otherUser],
- source: 'local',
- });
-
- const req = { user: { id: userIdStr, _id: userId, email: 'stringcheck@test.com' } };
- await deleteUserController(req, mockRes);
-
- const group = await Group.findOne({ name: 'StringCheck' }).lean();
- expect(group.memberIds).toEqual([otherUser]);
- expect(group.memberIds).not.toContain(userIdStr);
- });
-});
diff --git a/api/server/controllers/__tests__/PermissionsController.spec.js b/api/server/controllers/__tests__/PermissionsController.spec.js
deleted file mode 100644
index a8d9518455..0000000000
--- a/api/server/controllers/__tests__/PermissionsController.spec.js
+++ /dev/null
@@ -1,242 +0,0 @@
-const mongoose = require('mongoose');
-
-const mockLogger = { error: jest.fn(), warn: jest.fn(), info: jest.fn(), debug: jest.fn() };
-
-jest.mock('@librechat/data-schemas', () => ({
- logger: mockLogger,
-}));
-
-const { ResourceType, PrincipalType } = jest.requireActual('librechat-data-provider');
-
-jest.mock('librechat-data-provider', () => ({
- ...jest.requireActual('librechat-data-provider'),
-}));
-
-jest.mock('@librechat/api', () => ({
- enrichRemoteAgentPrincipals: jest.fn(),
- backfillRemoteAgentPermissions: jest.fn(),
-}));
-
-const mockBulkUpdateResourcePermissions = jest.fn();
-
-jest.mock('~/server/services/PermissionService', () => ({
- bulkUpdateResourcePermissions: (...args) => mockBulkUpdateResourcePermissions(...args),
- ensureGroupPrincipalExists: jest.fn(),
- getEffectivePermissions: jest.fn(),
- ensurePrincipalExists: jest.fn(),
- getAvailableRoles: jest.fn(),
- findAccessibleResources: jest.fn(),
- getResourcePermissionsMap: jest.fn(),
-}));
-
-const mockRemoveAgentFromUserFavorites = jest.fn();
-
-jest.mock('~/models', () => ({
- searchPrincipals: jest.fn(),
- sortPrincipalsByRelevance: jest.fn(),
- calculateRelevanceScore: jest.fn(),
- removeAgentFromUserFavorites: (...args) => mockRemoveAgentFromUserFavorites(...args),
-}));
-
-jest.mock('~/server/services/GraphApiService', () => ({
- entraIdPrincipalFeatureEnabled: jest.fn(() => false),
- searchEntraIdPrincipals: jest.fn(),
-}));
-
-const { updateResourcePermissions } = require('../PermissionsController');
-
-const createMockReq = (overrides = {}) => ({
- params: { resourceType: ResourceType.AGENT, resourceId: '507f1f77bcf86cd799439011' },
- body: { updated: [], removed: [], public: false },
- user: { id: 'user-1', role: 'USER' },
- headers: { authorization: '' },
- ...overrides,
-});
-
-const createMockRes = () => {
- const res = {};
- res.status = jest.fn().mockReturnValue(res);
- res.json = jest.fn().mockReturnValue(res);
- return res;
-};
-
-const flushPromises = () => new Promise((resolve) => setImmediate(resolve));
-
-describe('PermissionsController', () => {
- beforeEach(() => {
- jest.clearAllMocks();
- });
-
- describe('updateResourcePermissions — favorites cleanup', () => {
- const agentObjectId = new mongoose.Types.ObjectId().toString();
- const revokedUserId = new mongoose.Types.ObjectId().toString();
-
- beforeEach(() => {
- mockBulkUpdateResourcePermissions.mockResolvedValue({
- granted: [],
- updated: [],
- revoked: [{ type: PrincipalType.USER, id: revokedUserId, name: 'Revoked User' }],
- errors: [],
- });
-
- mockRemoveAgentFromUserFavorites.mockResolvedValue(undefined);
- });
-
- it('removes agent from revoked users favorites on AGENT resource type', async () => {
- const req = createMockReq({
- params: { resourceType: ResourceType.AGENT, resourceId: agentObjectId },
- body: {
- updated: [],
- removed: [{ type: PrincipalType.USER, id: revokedUserId }],
- public: false,
- },
- });
- const res = createMockRes();
-
- await updateResourcePermissions(req, res);
- await flushPromises();
-
- expect(res.status).toHaveBeenCalledWith(200);
- expect(mockRemoveAgentFromUserFavorites).toHaveBeenCalledWith(agentObjectId, [revokedUserId]);
- });
-
- it('removes agent from revoked users favorites on REMOTE_AGENT resource type', async () => {
- const req = createMockReq({
- params: { resourceType: ResourceType.REMOTE_AGENT, resourceId: agentObjectId },
- body: {
- updated: [],
- removed: [{ type: PrincipalType.USER, id: revokedUserId }],
- public: false,
- },
- });
- const res = createMockRes();
-
- await updateResourcePermissions(req, res);
- await flushPromises();
-
- expect(mockRemoveAgentFromUserFavorites).toHaveBeenCalledWith(agentObjectId, [revokedUserId]);
- });
-
- it('uses results.revoked (validated) not raw request payload', async () => {
- const validId = new mongoose.Types.ObjectId().toString();
- const invalidId = 'not-a-valid-id';
-
- mockBulkUpdateResourcePermissions.mockResolvedValue({
- granted: [],
- updated: [],
- revoked: [{ type: PrincipalType.USER, id: validId }],
- errors: [{ principal: { type: PrincipalType.USER, id: invalidId }, error: 'Invalid ID' }],
- });
-
- const req = createMockReq({
- params: { resourceType: ResourceType.AGENT, resourceId: agentObjectId },
- body: {
- updated: [],
- removed: [
- { type: PrincipalType.USER, id: validId },
- { type: PrincipalType.USER, id: invalidId },
- ],
- public: false,
- },
- });
- const res = createMockRes();
-
- await updateResourcePermissions(req, res);
- await flushPromises();
-
- expect(mockRemoveAgentFromUserFavorites).toHaveBeenCalledWith(agentObjectId, [validId]);
- });
-
- it('skips cleanup when no USER principals are revoked', async () => {
- mockBulkUpdateResourcePermissions.mockResolvedValue({
- granted: [],
- updated: [],
- revoked: [{ type: PrincipalType.GROUP, id: 'group-1' }],
- errors: [],
- });
-
- const req = createMockReq({
- params: { resourceType: ResourceType.AGENT, resourceId: agentObjectId },
- body: {
- updated: [],
- removed: [{ type: PrincipalType.GROUP, id: 'group-1' }],
- public: false,
- },
- });
- const res = createMockRes();
-
- await updateResourcePermissions(req, res);
- await flushPromises();
-
- expect(mockRemoveAgentFromUserFavorites).not.toHaveBeenCalled();
- });
-
- it('skips cleanup for non-agent resource types', async () => {
- mockBulkUpdateResourcePermissions.mockResolvedValue({
- granted: [],
- updated: [],
- revoked: [{ type: PrincipalType.USER, id: revokedUserId }],
- errors: [],
- });
-
- const req = createMockReq({
- params: { resourceType: ResourceType.PROMPTGROUP, resourceId: agentObjectId },
- body: {
- updated: [],
- removed: [{ type: PrincipalType.USER, id: revokedUserId }],
- public: false,
- },
- });
- const res = createMockRes();
-
- await updateResourcePermissions(req, res);
- await flushPromises();
-
- expect(res.status).toHaveBeenCalledWith(200);
- expect(mockRemoveAgentFromUserFavorites).not.toHaveBeenCalled();
- });
-
- it('handles agent not found gracefully', async () => {
- mockRemoveAgentFromUserFavorites.mockResolvedValue(undefined);
-
- const req = createMockReq({
- params: { resourceType: ResourceType.AGENT, resourceId: agentObjectId },
- body: {
- updated: [],
- removed: [{ type: PrincipalType.USER, id: revokedUserId }],
- public: false,
- },
- });
- const res = createMockRes();
-
- await updateResourcePermissions(req, res);
- await flushPromises();
-
- expect(mockRemoveAgentFromUserFavorites).toHaveBeenCalled();
- expect(res.status).toHaveBeenCalledWith(200);
- });
-
- it('logs error when removeAgentFromUserFavorites fails without blocking response', async () => {
- mockRemoveAgentFromUserFavorites.mockRejectedValue(new Error('DB connection lost'));
-
- const req = createMockReq({
- params: { resourceType: ResourceType.AGENT, resourceId: agentObjectId },
- body: {
- updated: [],
- removed: [{ type: PrincipalType.USER, id: revokedUserId }],
- public: false,
- },
- });
- const res = createMockRes();
-
- await updateResourcePermissions(req, res);
- await flushPromises();
-
- expect(res.status).toHaveBeenCalledWith(200);
- expect(mockLogger.error).toHaveBeenCalledWith(
- '[removeRevokedAgentFromFavorites] Error cleaning up favorites',
- expect.any(Error),
- );
- });
- });
-});
diff --git a/api/server/controllers/__tests__/TwoFactorController.spec.js b/api/server/controllers/__tests__/TwoFactorController.spec.js
deleted file mode 100644
index 62531d94a1..0000000000
--- a/api/server/controllers/__tests__/TwoFactorController.spec.js
+++ /dev/null
@@ -1,264 +0,0 @@
-const mockGetUserById = jest.fn();
-const mockUpdateUser = jest.fn();
-const mockVerifyOTPOrBackupCode = jest.fn();
-const mockGenerateTOTPSecret = jest.fn();
-const mockGenerateBackupCodes = jest.fn();
-const mockEncryptV3 = jest.fn();
-
-jest.mock('@librechat/data-schemas', () => ({
- encryptV3: (...args) => mockEncryptV3(...args),
- logger: { error: jest.fn() },
-}));
-
-jest.mock('~/server/services/twoFactorService', () => ({
- verifyOTPOrBackupCode: (...args) => mockVerifyOTPOrBackupCode(...args),
- generateBackupCodes: (...args) => mockGenerateBackupCodes(...args),
- generateTOTPSecret: (...args) => mockGenerateTOTPSecret(...args),
- verifyBackupCode: jest.fn(),
- getTOTPSecret: jest.fn(),
- verifyTOTP: jest.fn(),
-}));
-
-jest.mock('~/models', () => ({
- getUserById: (...args) => mockGetUserById(...args),
- updateUser: (...args) => mockUpdateUser(...args),
-}));
-
-const { enable2FA, regenerateBackupCodes } = require('~/server/controllers/TwoFactorController');
-
-function createRes() {
- const res = {};
- res.status = jest.fn().mockReturnValue(res);
- res.json = jest.fn().mockReturnValue(res);
- return res;
-}
-
-const PLAIN_CODES = ['code1', 'code2', 'code3'];
-const CODE_OBJECTS = [
- { codeHash: 'h1', used: false, usedAt: null },
- { codeHash: 'h2', used: false, usedAt: null },
- { codeHash: 'h3', used: false, usedAt: null },
-];
-
-beforeEach(() => {
- jest.clearAllMocks();
- mockGenerateTOTPSecret.mockReturnValue('NEWSECRET');
- mockGenerateBackupCodes.mockResolvedValue({ plainCodes: PLAIN_CODES, codeObjects: CODE_OBJECTS });
- mockEncryptV3.mockReturnValue('encrypted-secret');
-});
-
-describe('enable2FA', () => {
- it('allows first-time setup without token — writes to pending fields', async () => {
- const req = { user: { id: 'user1' }, body: {} };
- const res = createRes();
- mockGetUserById.mockResolvedValue({ _id: 'user1', twoFactorEnabled: false, email: 'a@b.com' });
- mockUpdateUser.mockResolvedValue({ email: 'a@b.com' });
-
- await enable2FA(req, res);
-
- expect(res.status).toHaveBeenCalledWith(200);
- expect(res.json).toHaveBeenCalledWith(
- expect.objectContaining({ otpauthUrl: expect.any(String), backupCodes: PLAIN_CODES }),
- );
- expect(mockVerifyOTPOrBackupCode).not.toHaveBeenCalled();
- const updateCall = mockUpdateUser.mock.calls[0][1];
- expect(updateCall).toHaveProperty('pendingTotpSecret', 'encrypted-secret');
- expect(updateCall).toHaveProperty('pendingBackupCodes', CODE_OBJECTS);
- expect(updateCall).not.toHaveProperty('twoFactorEnabled');
- expect(updateCall).not.toHaveProperty('totpSecret');
- expect(updateCall).not.toHaveProperty('backupCodes');
- });
-
- it('re-enrollment writes to pending fields, leaving live 2FA intact', async () => {
- const req = { user: { id: 'user1' }, body: { token: '123456' } };
- const res = createRes();
- const existingUser = {
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- email: 'a@b.com',
- };
- mockGetUserById.mockResolvedValue(existingUser);
- mockVerifyOTPOrBackupCode.mockResolvedValue({ verified: true });
- mockUpdateUser.mockResolvedValue({ email: 'a@b.com' });
-
- await enable2FA(req, res);
-
- expect(mockVerifyOTPOrBackupCode).toHaveBeenCalledWith({
- user: existingUser,
- token: '123456',
- backupCode: undefined,
- persistBackupUse: false,
- });
- expect(res.status).toHaveBeenCalledWith(200);
- const updateCall = mockUpdateUser.mock.calls[0][1];
- expect(updateCall).toHaveProperty('pendingTotpSecret', 'encrypted-secret');
- expect(updateCall).toHaveProperty('pendingBackupCodes', CODE_OBJECTS);
- expect(updateCall).not.toHaveProperty('twoFactorEnabled');
- expect(updateCall).not.toHaveProperty('totpSecret');
- });
-
- it('allows re-enrollment with valid backup code (persistBackupUse: false)', async () => {
- const req = { user: { id: 'user1' }, body: { backupCode: 'backup123' } };
- const res = createRes();
- const existingUser = {
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- email: 'a@b.com',
- };
- mockGetUserById.mockResolvedValue(existingUser);
- mockVerifyOTPOrBackupCode.mockResolvedValue({ verified: true });
- mockUpdateUser.mockResolvedValue({ email: 'a@b.com' });
-
- await enable2FA(req, res);
-
- expect(mockVerifyOTPOrBackupCode).toHaveBeenCalledWith(
- expect.objectContaining({ persistBackupUse: false }),
- );
- expect(res.status).toHaveBeenCalledWith(200);
- });
-
- it('returns error when no token provided and 2FA is enabled', async () => {
- const req = { user: { id: 'user1' }, body: {} };
- const res = createRes();
- mockGetUserById.mockResolvedValue({
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- });
- mockVerifyOTPOrBackupCode.mockResolvedValue({ verified: false, status: 400 });
-
- await enable2FA(req, res);
-
- expect(res.status).toHaveBeenCalledWith(400);
- expect(mockUpdateUser).not.toHaveBeenCalled();
- });
-
- it('returns 401 when invalid token provided and 2FA is enabled', async () => {
- const req = { user: { id: 'user1' }, body: { token: 'wrong' } };
- const res = createRes();
- mockGetUserById.mockResolvedValue({
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- });
- mockVerifyOTPOrBackupCode.mockResolvedValue({
- verified: false,
- status: 401,
- message: 'Invalid token or backup code',
- });
-
- await enable2FA(req, res);
-
- expect(res.status).toHaveBeenCalledWith(401);
- expect(res.json).toHaveBeenCalledWith({ message: 'Invalid token or backup code' });
- expect(mockUpdateUser).not.toHaveBeenCalled();
- });
-});
-
-describe('regenerateBackupCodes', () => {
- it('returns 404 when user not found', async () => {
- const req = { user: { id: 'user1' }, body: {} };
- const res = createRes();
- mockGetUserById.mockResolvedValue(null);
-
- await regenerateBackupCodes(req, res);
-
- expect(res.status).toHaveBeenCalledWith(404);
- expect(res.json).toHaveBeenCalledWith({ message: 'User not found' });
- });
-
- it('requires OTP when 2FA is enabled', async () => {
- const req = { user: { id: 'user1' }, body: { token: '123456' } };
- const res = createRes();
- mockGetUserById.mockResolvedValue({
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- });
- mockVerifyOTPOrBackupCode.mockResolvedValue({ verified: true });
- mockUpdateUser.mockResolvedValue({});
-
- await regenerateBackupCodes(req, res);
-
- expect(mockVerifyOTPOrBackupCode).toHaveBeenCalled();
- expect(res.status).toHaveBeenCalledWith(200);
- expect(res.json).toHaveBeenCalledWith({
- backupCodes: PLAIN_CODES,
- backupCodesHash: CODE_OBJECTS,
- });
- });
-
- it('returns error when no token provided and 2FA is enabled', async () => {
- const req = { user: { id: 'user1' }, body: {} };
- const res = createRes();
- mockGetUserById.mockResolvedValue({
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- });
- mockVerifyOTPOrBackupCode.mockResolvedValue({ verified: false, status: 400 });
-
- await regenerateBackupCodes(req, res);
-
- expect(res.status).toHaveBeenCalledWith(400);
- });
-
- it('returns 401 when invalid token provided and 2FA is enabled', async () => {
- const req = { user: { id: 'user1' }, body: { token: 'wrong' } };
- const res = createRes();
- mockGetUserById.mockResolvedValue({
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- });
- mockVerifyOTPOrBackupCode.mockResolvedValue({
- verified: false,
- status: 401,
- message: 'Invalid token or backup code',
- });
-
- await regenerateBackupCodes(req, res);
-
- expect(res.status).toHaveBeenCalledWith(401);
- expect(res.json).toHaveBeenCalledWith({ message: 'Invalid token or backup code' });
- });
-
- it('includes backupCodesHash in response', async () => {
- const req = { user: { id: 'user1' }, body: { token: '123456' } };
- const res = createRes();
- mockGetUserById.mockResolvedValue({
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- });
- mockVerifyOTPOrBackupCode.mockResolvedValue({ verified: true });
- mockUpdateUser.mockResolvedValue({});
-
- await regenerateBackupCodes(req, res);
-
- const responseBody = res.json.mock.calls[0][0];
- expect(responseBody).toHaveProperty('backupCodesHash', CODE_OBJECTS);
- expect(responseBody).toHaveProperty('backupCodes', PLAIN_CODES);
- });
-
- it('allows regeneration without token when 2FA is not enabled', async () => {
- const req = { user: { id: 'user1' }, body: {} };
- const res = createRes();
- mockGetUserById.mockResolvedValue({
- _id: 'user1',
- twoFactorEnabled: false,
- });
- mockUpdateUser.mockResolvedValue({});
-
- await regenerateBackupCodes(req, res);
-
- expect(mockVerifyOTPOrBackupCode).not.toHaveBeenCalled();
- expect(res.status).toHaveBeenCalledWith(200);
- expect(res.json).toHaveBeenCalledWith({
- backupCodes: PLAIN_CODES,
- backupCodesHash: CODE_OBJECTS,
- });
- });
-});
diff --git a/api/server/controllers/__tests__/deleteUser.spec.js b/api/server/controllers/__tests__/deleteUser.spec.js
deleted file mode 100644
index a382a6cdc7..0000000000
--- a/api/server/controllers/__tests__/deleteUser.spec.js
+++ /dev/null
@@ -1,287 +0,0 @@
-const mockGetUserById = jest.fn();
-const mockDeleteMessages = jest.fn();
-const mockDeleteAllUserSessions = jest.fn();
-const mockDeleteUserById = jest.fn();
-const mockDeleteAllSharedLinks = jest.fn();
-const mockDeletePresets = jest.fn();
-const mockDeleteUserKey = jest.fn();
-const mockDeleteConvos = jest.fn();
-const mockDeleteFiles = jest.fn();
-const mockGetFiles = jest.fn();
-const mockUpdateUserPlugins = jest.fn();
-const mockUpdateUser = jest.fn();
-const mockFindToken = jest.fn();
-const mockVerifyOTPOrBackupCode = jest.fn();
-const mockDeleteUserPluginAuth = jest.fn();
-const mockProcessDeleteRequest = jest.fn();
-const mockDeleteToolCalls = jest.fn();
-const mockDeleteUserAgents = jest.fn();
-const mockDeleteUserPrompts = jest.fn();
-
-jest.mock('@librechat/data-schemas', () => ({
- logger: { error: jest.fn(), info: jest.fn() },
- webSearchKeys: [],
-}));
-
-jest.mock('librechat-data-provider', () => ({
- Tools: {},
- CacheKeys: {},
- Constants: { mcp_delimiter: '::', mcp_prefix: 'mcp_' },
- FileSources: {},
-}));
-
-jest.mock('@librechat/api', () => ({
- MCPOAuthHandler: {},
- MCPTokenStorage: {},
- normalizeHttpError: jest.fn(),
- extractWebSearchEnvVars: jest.fn(),
- needsRefresh: jest.fn(),
- getNewS3URL: jest.fn(),
-}));
-
-jest.mock('~/models', () => ({
- deleteAllUserSessions: (...args) => mockDeleteAllUserSessions(...args),
- deleteAllSharedLinks: (...args) => mockDeleteAllSharedLinks(...args),
- updateUserPlugins: (...args) => mockUpdateUserPlugins(...args),
- deleteUserById: (...args) => mockDeleteUserById(...args),
- deleteMessages: (...args) => mockDeleteMessages(...args),
- deletePresets: (...args) => mockDeletePresets(...args),
- deleteUserKey: (...args) => mockDeleteUserKey(...args),
- getUserById: (...args) => mockGetUserById(...args),
- deleteConvos: (...args) => mockDeleteConvos(...args),
- deleteFiles: (...args) => mockDeleteFiles(...args),
- updateUser: (...args) => mockUpdateUser(...args),
- findToken: (...args) => mockFindToken(...args),
- getFiles: (...args) => mockGetFiles(...args),
- deleteToolCalls: (...args) => mockDeleteToolCalls(...args),
- deleteUserAgents: (...args) => mockDeleteUserAgents(...args),
- deleteUserPrompts: (...args) => mockDeleteUserPrompts(...args),
- deleteTransactions: jest.fn(),
- deleteBalances: jest.fn(),
- deleteAllAgentApiKeys: jest.fn(),
- deleteAssistants: jest.fn(),
- deleteConversationTags: jest.fn(),
- deleteAllUserMemories: jest.fn(),
- deleteActions: jest.fn(),
- deleteTokens: jest.fn(),
- removeUserFromAllGroups: jest.fn(),
- deleteAclEntries: jest.fn(),
- getSoleOwnedResourceIds: jest.fn().mockResolvedValue([]),
-}));
-
-jest.mock('~/server/services/PluginService', () => ({
- updateUserPluginAuth: jest.fn(),
- deleteUserPluginAuth: (...args) => mockDeleteUserPluginAuth(...args),
-}));
-
-jest.mock('~/server/services/twoFactorService', () => ({
- verifyOTPOrBackupCode: (...args) => mockVerifyOTPOrBackupCode(...args),
-}));
-
-jest.mock('~/server/services/AuthService', () => ({
- verifyEmail: jest.fn(),
- resendVerificationEmail: jest.fn(),
-}));
-
-jest.mock('~/config', () => ({
- getMCPManager: jest.fn(),
- getFlowStateManager: jest.fn(),
- getMCPServersRegistry: jest.fn(),
-}));
-
-jest.mock('~/server/services/Config/getCachedTools', () => ({
- invalidateCachedTools: jest.fn(),
-}));
-
-jest.mock('~/server/services/Files/process', () => ({
- processDeleteRequest: (...args) => mockProcessDeleteRequest(...args),
-}));
-
-jest.mock('~/server/services/Config', () => ({
- getAppConfig: jest.fn(),
-}));
-
-jest.mock('~/cache', () => ({
- getLogStores: jest.fn(),
-}));
-
-const { deleteUserController } = require('~/server/controllers/UserController');
-
-function createRes() {
- const res = {};
- res.status = jest.fn().mockReturnValue(res);
- res.json = jest.fn().mockReturnValue(res);
- res.send = jest.fn().mockReturnValue(res);
- return res;
-}
-
-function stubDeletionMocks() {
- mockDeleteMessages.mockResolvedValue();
- mockDeleteAllUserSessions.mockResolvedValue();
- mockDeleteUserKey.mockResolvedValue();
- mockDeletePresets.mockResolvedValue();
- mockDeleteConvos.mockResolvedValue();
- mockDeleteUserPluginAuth.mockResolvedValue();
- mockDeleteUserById.mockResolvedValue();
- mockDeleteAllSharedLinks.mockResolvedValue();
- mockGetFiles.mockResolvedValue([]);
- mockProcessDeleteRequest.mockResolvedValue();
- mockDeleteFiles.mockResolvedValue();
- mockDeleteToolCalls.mockResolvedValue();
- mockDeleteUserAgents.mockResolvedValue();
- mockDeleteUserPrompts.mockResolvedValue();
-}
-
-beforeEach(() => {
- jest.clearAllMocks();
- stubDeletionMocks();
-});
-
-describe('deleteUserController - 2FA enforcement', () => {
- it('proceeds with deletion when 2FA is not enabled', async () => {
- const req = { user: { id: 'user1', _id: 'user1', email: 'a@b.com' }, body: {} };
- const res = createRes();
- mockGetUserById.mockResolvedValue({ _id: 'user1', twoFactorEnabled: false });
-
- await deleteUserController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(200);
- expect(res.send).toHaveBeenCalledWith({ message: 'User deleted' });
- expect(mockDeleteMessages).toHaveBeenCalled();
- expect(mockVerifyOTPOrBackupCode).not.toHaveBeenCalled();
- });
-
- it('proceeds with deletion when user has no 2FA record', async () => {
- const req = { user: { id: 'user1', _id: 'user1', email: 'a@b.com' }, body: {} };
- const res = createRes();
- mockGetUserById.mockResolvedValue(null);
-
- await deleteUserController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(200);
- expect(res.send).toHaveBeenCalledWith({ message: 'User deleted' });
- });
-
- it('returns error when 2FA is enabled and verification fails with 400', async () => {
- const req = { user: { id: 'user1', _id: 'user1' }, body: {} };
- const res = createRes();
- mockGetUserById.mockResolvedValue({
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- });
- mockVerifyOTPOrBackupCode.mockResolvedValue({ verified: false, status: 400 });
-
- await deleteUserController(req, res);
-
- expect(res.status).toHaveBeenCalledWith(400);
- expect(mockDeleteMessages).not.toHaveBeenCalled();
- });
-
- it('returns 401 when 2FA is enabled and invalid TOTP token provided', async () => {
- const existingUser = {
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- };
- const req = { user: { id: 'user1', _id: 'user1' }, body: { token: 'wrong' } };
- const res = createRes();
- mockGetUserById.mockResolvedValue(existingUser);
- mockVerifyOTPOrBackupCode.mockResolvedValue({
- verified: false,
- status: 401,
- message: 'Invalid token or backup code',
- });
-
- await deleteUserController(req, res);
-
- expect(mockVerifyOTPOrBackupCode).toHaveBeenCalledWith({
- user: existingUser,
- token: 'wrong',
- backupCode: undefined,
- });
- expect(res.status).toHaveBeenCalledWith(401);
- expect(res.json).toHaveBeenCalledWith({ message: 'Invalid token or backup code' });
- expect(mockDeleteMessages).not.toHaveBeenCalled();
- });
-
- it('returns 401 when 2FA is enabled and invalid backup code provided', async () => {
- const existingUser = {
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- backupCodes: [],
- };
- const req = { user: { id: 'user1', _id: 'user1' }, body: { backupCode: 'bad-code' } };
- const res = createRes();
- mockGetUserById.mockResolvedValue(existingUser);
- mockVerifyOTPOrBackupCode.mockResolvedValue({
- verified: false,
- status: 401,
- message: 'Invalid token or backup code',
- });
-
- await deleteUserController(req, res);
-
- expect(mockVerifyOTPOrBackupCode).toHaveBeenCalledWith({
- user: existingUser,
- token: undefined,
- backupCode: 'bad-code',
- });
- expect(res.status).toHaveBeenCalledWith(401);
- expect(mockDeleteMessages).not.toHaveBeenCalled();
- });
-
- it('deletes account when valid TOTP token provided with 2FA enabled', async () => {
- const existingUser = {
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- };
- const req = {
- user: { id: 'user1', _id: 'user1', email: 'a@b.com' },
- body: { token: '123456' },
- };
- const res = createRes();
- mockGetUserById.mockResolvedValue(existingUser);
- mockVerifyOTPOrBackupCode.mockResolvedValue({ verified: true });
-
- await deleteUserController(req, res);
-
- expect(mockVerifyOTPOrBackupCode).toHaveBeenCalledWith({
- user: existingUser,
- token: '123456',
- backupCode: undefined,
- });
- expect(res.status).toHaveBeenCalledWith(200);
- expect(res.send).toHaveBeenCalledWith({ message: 'User deleted' });
- expect(mockDeleteMessages).toHaveBeenCalled();
- });
-
- it('deletes account when valid backup code provided with 2FA enabled', async () => {
- const existingUser = {
- _id: 'user1',
- twoFactorEnabled: true,
- totpSecret: 'enc-secret',
- backupCodes: [{ codeHash: 'h1', used: false }],
- };
- const req = {
- user: { id: 'user1', _id: 'user1', email: 'a@b.com' },
- body: { backupCode: 'valid-code' },
- };
- const res = createRes();
- mockGetUserById.mockResolvedValue(existingUser);
- mockVerifyOTPOrBackupCode.mockResolvedValue({ verified: true });
-
- await deleteUserController(req, res);
-
- expect(mockVerifyOTPOrBackupCode).toHaveBeenCalledWith({
- user: existingUser,
- token: undefined,
- backupCode: 'valid-code',
- });
- expect(res.status).toHaveBeenCalledWith(200);
- expect(res.send).toHaveBeenCalledWith({ message: 'User deleted' });
- expect(mockDeleteMessages).toHaveBeenCalled();
- });
-});
diff --git a/api/server/controllers/__tests__/deleteUserMcpServers.spec.js b/api/server/controllers/__tests__/deleteUserMcpServers.spec.js
deleted file mode 100644
index fcb3211f24..0000000000
--- a/api/server/controllers/__tests__/deleteUserMcpServers.spec.js
+++ /dev/null
@@ -1,319 +0,0 @@
-const mockGetMCPManager = jest.fn();
-const mockInvalidateCachedTools = jest.fn();
-
-jest.mock('~/config', () => ({
- getMCPManager: (...args) => mockGetMCPManager(...args),
- getFlowStateManager: jest.fn(),
- getMCPServersRegistry: jest.fn(),
-}));
-
-jest.mock('~/server/services/Config/getCachedTools', () => ({
- invalidateCachedTools: (...args) => mockInvalidateCachedTools(...args),
-}));
-
-jest.mock('~/server/services/Config', () => ({
- getAppConfig: jest.fn(),
- getMCPServerTools: jest.fn(),
-}));
-
-const mongoose = require('mongoose');
-const { mcpServerSchema } = require('@librechat/data-schemas');
-const { MongoMemoryServer } = require('mongodb-memory-server');
-const {
- ResourceType,
- AccessRoleIds,
- PrincipalType,
- PermissionBits,
-} = require('librechat-data-provider');
-const permissionService = require('~/server/services/PermissionService');
-const { deleteUserMcpServers } = require('~/server/controllers/UserController');
-const { AclEntry, AccessRole } = require('~/db/models');
-
-let MCPServer;
-
-describe('deleteUserMcpServers', () => {
- let mongoServer;
-
- beforeAll(async () => {
- mongoServer = await MongoMemoryServer.create();
- const mongoUri = mongoServer.getUri();
- MCPServer = mongoose.models.MCPServer || mongoose.model('MCPServer', mcpServerSchema);
- await mongoose.connect(mongoUri);
-
- await AccessRole.create({
- accessRoleId: AccessRoleIds.MCPSERVER_OWNER,
- name: 'MCP Server Owner',
- resourceType: ResourceType.MCPSERVER,
- permBits:
- PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE,
- });
-
- await AccessRole.create({
- accessRoleId: AccessRoleIds.MCPSERVER_VIEWER,
- name: 'MCP Server Viewer',
- resourceType: ResourceType.MCPSERVER,
- permBits: PermissionBits.VIEW,
- });
- }, 20000);
-
- afterAll(async () => {
- await mongoose.disconnect();
- await mongoServer.stop();
- });
-
- beforeEach(async () => {
- await MCPServer.deleteMany({});
- await AclEntry.deleteMany({});
- jest.clearAllMocks();
- });
-
- test('should delete solely-owned MCP servers and their ACL entries', async () => {
- const userId = new mongoose.Types.ObjectId();
-
- const server = await MCPServer.create({
- serverName: 'sole-owned-server',
- config: { title: 'Test Server' },
- author: userId,
- });
-
- await permissionService.grantPermission({
- principalType: PrincipalType.USER,
- principalId: userId,
- resourceType: ResourceType.MCPSERVER,
- resourceId: server._id,
- accessRoleId: AccessRoleIds.MCPSERVER_OWNER,
- grantedBy: userId,
- });
-
- mockGetMCPManager.mockReturnValue({
- disconnectUserConnection: jest.fn().mockResolvedValue(undefined),
- });
-
- await deleteUserMcpServers(userId.toString());
-
- expect(await MCPServer.findById(server._id)).toBeNull();
-
- const aclEntries = await AclEntry.find({
- resourceType: ResourceType.MCPSERVER,
- resourceId: server._id,
- });
- expect(aclEntries).toHaveLength(0);
- });
-
- test('should disconnect MCP sessions and invalidate tool cache before deletion', async () => {
- const userId = new mongoose.Types.ObjectId();
- const mockDisconnect = jest.fn().mockResolvedValue(undefined);
-
- const server = await MCPServer.create({
- serverName: 'session-server',
- config: { title: 'Session Server' },
- author: userId,
- });
-
- await permissionService.grantPermission({
- principalType: PrincipalType.USER,
- principalId: userId,
- resourceType: ResourceType.MCPSERVER,
- resourceId: server._id,
- accessRoleId: AccessRoleIds.MCPSERVER_OWNER,
- grantedBy: userId,
- });
-
- mockGetMCPManager.mockReturnValue({ disconnectUserConnection: mockDisconnect });
-
- await deleteUserMcpServers(userId.toString());
-
- expect(mockDisconnect).toHaveBeenCalledWith(userId.toString(), 'session-server');
- expect(mockInvalidateCachedTools).toHaveBeenCalledWith({
- userId: userId.toString(),
- serverName: 'session-server',
- });
- });
-
- test('should preserve multi-owned MCP servers', async () => {
- const deletingUserId = new mongoose.Types.ObjectId();
- const otherOwnerId = new mongoose.Types.ObjectId();
-
- const soleServer = await MCPServer.create({
- serverName: 'sole-server',
- config: { title: 'Sole Server' },
- author: deletingUserId,
- });
-
- const multiServer = await MCPServer.create({
- serverName: 'multi-server',
- config: { title: 'Multi Server' },
- author: deletingUserId,
- });
-
- await permissionService.grantPermission({
- principalType: PrincipalType.USER,
- principalId: deletingUserId,
- resourceType: ResourceType.MCPSERVER,
- resourceId: soleServer._id,
- accessRoleId: AccessRoleIds.MCPSERVER_OWNER,
- grantedBy: deletingUserId,
- });
-
- await permissionService.grantPermission({
- principalType: PrincipalType.USER,
- principalId: deletingUserId,
- resourceType: ResourceType.MCPSERVER,
- resourceId: multiServer._id,
- accessRoleId: AccessRoleIds.MCPSERVER_OWNER,
- grantedBy: deletingUserId,
- });
- await permissionService.grantPermission({
- principalType: PrincipalType.USER,
- principalId: otherOwnerId,
- resourceType: ResourceType.MCPSERVER,
- resourceId: multiServer._id,
- accessRoleId: AccessRoleIds.MCPSERVER_OWNER,
- grantedBy: otherOwnerId,
- });
-
- mockGetMCPManager.mockReturnValue({
- disconnectUserConnection: jest.fn().mockResolvedValue(undefined),
- });
-
- await deleteUserMcpServers(deletingUserId.toString());
-
- expect(await MCPServer.findById(soleServer._id)).toBeNull();
- expect(await MCPServer.findById(multiServer._id)).not.toBeNull();
-
- const soleAcl = await AclEntry.find({
- resourceType: ResourceType.MCPSERVER,
- resourceId: soleServer._id,
- });
- expect(soleAcl).toHaveLength(0);
-
- const multiAclOther = await AclEntry.find({
- resourceType: ResourceType.MCPSERVER,
- resourceId: multiServer._id,
- principalId: otherOwnerId,
- });
- expect(multiAclOther).toHaveLength(1);
- expect(multiAclOther[0].permBits & PermissionBits.DELETE).toBeTruthy();
-
- const multiAclDeleting = await AclEntry.find({
- resourceType: ResourceType.MCPSERVER,
- resourceId: multiServer._id,
- principalId: deletingUserId,
- });
- expect(multiAclDeleting).toHaveLength(1);
- });
-
- test('should be a no-op when user has no owned MCP servers', async () => {
- const userId = new mongoose.Types.ObjectId();
-
- const otherUserId = new mongoose.Types.ObjectId();
- const server = await MCPServer.create({
- serverName: 'other-server',
- config: { title: 'Other Server' },
- author: otherUserId,
- });
-
- await permissionService.grantPermission({
- principalType: PrincipalType.USER,
- principalId: otherUserId,
- resourceType: ResourceType.MCPSERVER,
- resourceId: server._id,
- accessRoleId: AccessRoleIds.MCPSERVER_OWNER,
- grantedBy: otherUserId,
- });
-
- await deleteUserMcpServers(userId.toString());
-
- expect(await MCPServer.findById(server._id)).not.toBeNull();
- expect(mockGetMCPManager).not.toHaveBeenCalled();
- });
-
- test('should handle gracefully when MCPServer model is not registered', async () => {
- const originalModel = mongoose.models.MCPServer;
- delete mongoose.models.MCPServer;
-
- try {
- const userId = new mongoose.Types.ObjectId();
- await expect(deleteUserMcpServers(userId.toString())).resolves.toBeUndefined();
- } finally {
- mongoose.models.MCPServer = originalModel;
- }
- });
-
- test('should handle gracefully when MCPManager is not available', async () => {
- const userId = new mongoose.Types.ObjectId();
-
- const server = await MCPServer.create({
- serverName: 'no-manager-server',
- config: { title: 'No Manager Server' },
- author: userId,
- });
-
- await permissionService.grantPermission({
- principalType: PrincipalType.USER,
- principalId: userId,
- resourceType: ResourceType.MCPSERVER,
- resourceId: server._id,
- accessRoleId: AccessRoleIds.MCPSERVER_OWNER,
- grantedBy: userId,
- });
-
- mockGetMCPManager.mockReturnValue(null);
-
- await deleteUserMcpServers(userId.toString());
-
- expect(await MCPServer.findById(server._id)).toBeNull();
- });
-
- test('should delete legacy MCP servers that have author but no ACL entries', async () => {
- const legacyUserId = new mongoose.Types.ObjectId();
-
- const legacyServer = await MCPServer.create({
- serverName: 'legacy-server',
- config: { title: 'Legacy Server' },
- author: legacyUserId,
- });
-
- mockGetMCPManager.mockReturnValue({
- disconnectUserConnection: jest.fn().mockResolvedValue(undefined),
- });
-
- await deleteUserMcpServers(legacyUserId.toString());
-
- expect(await MCPServer.findById(legacyServer._id)).toBeNull();
- });
-
- test('should delete both ACL-owned and legacy servers in one call', async () => {
- const userId = new mongoose.Types.ObjectId();
-
- const aclServer = await MCPServer.create({
- serverName: 'acl-server',
- config: { title: 'ACL Server' },
- author: userId,
- });
-
- await permissionService.grantPermission({
- principalType: PrincipalType.USER,
- principalId: userId,
- resourceType: ResourceType.MCPSERVER,
- resourceId: aclServer._id,
- accessRoleId: AccessRoleIds.MCPSERVER_OWNER,
- grantedBy: userId,
- });
-
- const legacyServer = await MCPServer.create({
- serverName: 'legacy-mixed-server',
- config: { title: 'Legacy Mixed' },
- author: userId,
- });
-
- mockGetMCPManager.mockReturnValue({
- disconnectUserConnection: jest.fn().mockResolvedValue(undefined),
- });
-
- await deleteUserMcpServers(userId.toString());
-
- expect(await MCPServer.findById(aclServer._id)).toBeNull();
- expect(await MCPServer.findById(legacyServer._id)).toBeNull();
- });
-});
diff --git a/api/server/controllers/__tests__/deleteUserResourceCoverage.spec.js b/api/server/controllers/__tests__/deleteUserResourceCoverage.spec.js
deleted file mode 100644
index b08e502800..0000000000
--- a/api/server/controllers/__tests__/deleteUserResourceCoverage.spec.js
+++ /dev/null
@@ -1,53 +0,0 @@
-const fs = require('fs');
-const path = require('path');
-const { ResourceType } = require('librechat-data-provider');
-
-/**
- * Maps each ResourceType to the cleanup function name that must appear in
- * deleteUserController's source to prove it is handled during user deletion.
- *
- * When a new ResourceType is added, this test will fail until a corresponding
- * entry is added here (or to NO_USER_CLEANUP_NEEDED) AND the actual cleanup
- * logic is implemented.
- */
-const HANDLED_RESOURCE_TYPES = {
- [ResourceType.AGENT]: 'deleteUserAgents',
- [ResourceType.REMOTE_AGENT]: 'deleteUserAgents',
- [ResourceType.PROMPTGROUP]: 'deleteUserPrompts',
- [ResourceType.MCPSERVER]: 'deleteUserMcpServers',
-};
-
-/**
- * ResourceTypes that are ACL-tracked but have no per-user deletion semantics
- * (e.g., system resources, public-only). Must be explicitly listed here with
- * a justification to prevent silent omissions.
- */
-const NO_USER_CLEANUP_NEEDED = new Set([
- // Example: ResourceType.SYSTEM_TEMPLATE — public/system; not user-owned
-]);
-
-describe('deleteUserController - resource type coverage guard', () => {
- let controllerSource;
-
- beforeAll(() => {
- controllerSource = fs.readFileSync(path.resolve(__dirname, '../UserController.js'), 'utf-8');
- });
-
- test('every ResourceType must have a documented cleanup handler or explicit exclusion', () => {
- const allTypes = Object.values(ResourceType);
- const handledTypes = Object.keys(HANDLED_RESOURCE_TYPES);
- const unhandledTypes = allTypes.filter(
- (t) => !handledTypes.includes(t) && !NO_USER_CLEANUP_NEEDED.has(t),
- );
-
- expect(unhandledTypes).toEqual([]);
- });
-
- test('every cleanup handler referenced in HANDLED_RESOURCE_TYPES must appear in the controller source', () => {
- const uniqueHandlers = [...new Set(Object.values(HANDLED_RESOURCE_TYPES))];
-
- for (const handler of uniqueHandlers) {
- expect(controllerSource).toContain(handler);
- }
- });
-});
diff --git a/api/server/controllers/agents/__tests__/openai.spec.js b/api/server/controllers/agents/__tests__/openai.spec.js
index c959be6cf4..8592c79a2d 100644
--- a/api/server/controllers/agents/__tests__/openai.spec.js
+++ b/api/server/controllers/agents/__tests__/openai.spec.js
@@ -3,7 +3,6 @@
* Tests that recordCollectedUsage is called correctly for token spending
*/
-const mockProcessStream = jest.fn().mockResolvedValue(undefined);
const mockSpendTokens = jest.fn().mockResolvedValue({});
const mockSpendStructuredTokens = jest.fn().mockResolvedValue({});
const mockRecordCollectedUsage = jest
@@ -36,7 +35,7 @@ jest.mock('@librechat/agents', () => ({
jest.mock('@librechat/api', () => ({
writeSSE: jest.fn(),
createRun: jest.fn().mockResolvedValue({
- processStream: mockProcessStream,
+ processStream: jest.fn().mockResolvedValue(undefined),
}),
createChunk: jest.fn().mockReturnValue({}),
buildToolSet: jest.fn().mockReturnValue(new Set()),
@@ -69,7 +68,6 @@ jest.mock('@librechat/api', () => ({
toolCalls: new Map(),
usage: { promptTokens: 100, completionTokens: 50, reasoningTokens: 0 },
}),
- resolveRecursionLimit: jest.fn().mockReturnValue(50),
createToolExecuteHandler: jest.fn().mockReturnValue({ handle: jest.fn() }),
isChatCompletionValidationFailure: jest.fn().mockReturnValue(false),
}));
@@ -79,25 +77,33 @@ jest.mock('~/server/services/ToolService', () => ({
loadToolsForExecution: jest.fn().mockResolvedValue([]),
}));
-const mockGetMultiplier = jest.fn().mockReturnValue(1);
-const mockGetCacheMultiplier = jest.fn().mockReturnValue(null);
+jest.mock('~/models/spendTokens', () => ({
+ spendTokens: mockSpendTokens,
+ spendStructuredTokens: mockSpendStructuredTokens,
+}));
jest.mock('~/server/controllers/agents/callbacks', () => ({
createToolEndCallback: jest.fn().mockReturnValue(jest.fn()),
- buildSummarizationHandlers: jest.fn().mockReturnValue({}),
- markSummarizationUsage: jest.fn().mockImplementation((usage) => usage),
- agentLogHandlerObj: { handle: jest.fn() },
}));
jest.mock('~/server/services/PermissionService', () => ({
findAccessibleResources: jest.fn().mockResolvedValue([]),
}));
-const mockUpdateBalance = jest.fn().mockResolvedValue({});
-const mockBulkInsertTransactions = jest.fn().mockResolvedValue(undefined);
+jest.mock('~/models/Conversation', () => ({
+ getConvoFiles: jest.fn().mockResolvedValue([]),
+}));
+
+jest.mock('~/models/Agent', () => ({
+ getAgent: jest.fn().mockResolvedValue({
+ id: 'agent-123',
+ provider: 'openAI',
+ model_parameters: { model: 'gpt-4' },
+ }),
+ getAgents: jest.fn().mockResolvedValue([]),
+}));
jest.mock('~/models', () => ({
- getAgent: jest.fn().mockResolvedValue({ id: 'agent-123', name: 'Test Agent' }),
getFiles: jest.fn(),
getUserKey: jest.fn(),
getMessages: jest.fn(),
@@ -106,14 +112,6 @@ jest.mock('~/models', () => ({
getUserCodeFiles: jest.fn(),
getToolFilesByIds: jest.fn(),
getCodeGeneratedFiles: jest.fn(),
- updateBalance: mockUpdateBalance,
- bulkInsertTransactions: mockBulkInsertTransactions,
- spendTokens: mockSpendTokens,
- spendStructuredTokens: mockSpendStructuredTokens,
- getMultiplier: mockGetMultiplier,
- getCacheMultiplier: mockGetCacheMultiplier,
- getConvoFiles: jest.fn().mockResolvedValue([]),
- getConvo: jest.fn().mockResolvedValue(null),
}));
describe('OpenAIChatCompletionController', () => {
@@ -151,92 +149,13 @@ describe('OpenAIChatCompletionController', () => {
};
});
- describe('conversation ownership validation', () => {
- it('should skip ownership check when conversation_id is not provided', async () => {
- const { getConvo } = require('~/models');
- await OpenAIChatCompletionController(req, res);
- expect(getConvo).not.toHaveBeenCalled();
- });
-
- it('should return 400 when conversation_id is not a string', async () => {
- const { validateRequest } = require('@librechat/api');
- validateRequest.mockReturnValueOnce({
- request: { model: 'agent-123', messages: [], stream: false, conversation_id: { $gt: '' } },
- });
-
- await OpenAIChatCompletionController(req, res);
- expect(res.status).toHaveBeenCalledWith(400);
- });
-
- it('should return 404 when conversation is not owned by user', async () => {
- const { validateRequest } = require('@librechat/api');
- const { getConvo } = require('~/models');
- validateRequest.mockReturnValueOnce({
- request: {
- model: 'agent-123',
- messages: [],
- stream: false,
- conversation_id: 'convo-abc',
- },
- });
- getConvo.mockResolvedValueOnce(null);
-
- await OpenAIChatCompletionController(req, res);
- expect(getConvo).toHaveBeenCalledWith('user-123', 'convo-abc');
- expect(res.status).toHaveBeenCalledWith(404);
- });
-
- it('should proceed when conversation is owned by user', async () => {
- const { validateRequest } = require('@librechat/api');
- const { getConvo } = require('~/models');
- validateRequest.mockReturnValueOnce({
- request: {
- model: 'agent-123',
- messages: [],
- stream: false,
- conversation_id: 'convo-abc',
- },
- });
- getConvo.mockResolvedValueOnce({ conversationId: 'convo-abc', user: 'user-123' });
-
- await OpenAIChatCompletionController(req, res);
- expect(getConvo).toHaveBeenCalledWith('user-123', 'convo-abc');
- expect(res.status).not.toHaveBeenCalledWith(404);
- });
-
- it('should return 500 when getConvo throws a DB error', async () => {
- const { validateRequest } = require('@librechat/api');
- const { getConvo } = require('~/models');
- validateRequest.mockReturnValueOnce({
- request: {
- model: 'agent-123',
- messages: [],
- stream: false,
- conversation_id: 'convo-abc',
- },
- });
- getConvo.mockRejectedValueOnce(new Error('DB connection failed'));
-
- await OpenAIChatCompletionController(req, res);
- expect(res.status).toHaveBeenCalledWith(500);
- });
- });
-
describe('token usage recording', () => {
it('should call recordCollectedUsage after successful non-streaming completion', async () => {
await OpenAIChatCompletionController(req, res);
expect(mockRecordCollectedUsage).toHaveBeenCalledTimes(1);
expect(mockRecordCollectedUsage).toHaveBeenCalledWith(
- {
- spendTokens: mockSpendTokens,
- spendStructuredTokens: mockSpendStructuredTokens,
- pricing: { getMultiplier: mockGetMultiplier, getCacheMultiplier: mockGetCacheMultiplier },
- bulkWriteOps: {
- insertMany: mockBulkInsertTransactions,
- updateBalance: mockUpdateBalance,
- },
- },
+ { spendTokens: mockSpendTokens, spendStructuredTokens: mockSpendStructuredTokens },
expect.objectContaining({
user: 'user-123',
conversationId: expect.any(String),
@@ -263,18 +182,12 @@ describe('OpenAIChatCompletionController', () => {
);
});
- it('should pass spendTokens, spendStructuredTokens, pricing, and bulkWriteOps as dependencies', async () => {
+ it('should pass spendTokens and spendStructuredTokens as dependencies', async () => {
await OpenAIChatCompletionController(req, res);
const [deps] = mockRecordCollectedUsage.mock.calls[0];
expect(deps).toHaveProperty('spendTokens', mockSpendTokens);
expect(deps).toHaveProperty('spendStructuredTokens', mockSpendStructuredTokens);
- expect(deps).toHaveProperty('pricing');
- expect(deps.pricing).toHaveProperty('getMultiplier', mockGetMultiplier);
- expect(deps.pricing).toHaveProperty('getCacheMultiplier', mockGetCacheMultiplier);
- expect(deps).toHaveProperty('bulkWriteOps');
- expect(deps.bulkWriteOps).toHaveProperty('insertMany', mockBulkInsertTransactions);
- expect(deps.bulkWriteOps).toHaveProperty('updateBalance', mockUpdateBalance);
});
it('should include model from primaryConfig in recordCollectedUsage params', async () => {
@@ -288,36 +201,4 @@ describe('OpenAIChatCompletionController', () => {
);
});
});
-
- describe('recursionLimit resolution', () => {
- it('should pass resolveRecursionLimit result to processStream config', async () => {
- const { resolveRecursionLimit } = require('@librechat/api');
- resolveRecursionLimit.mockReturnValueOnce(75);
-
- await OpenAIChatCompletionController(req, res);
-
- expect(mockProcessStream).toHaveBeenCalledWith(
- expect.anything(),
- expect.objectContaining({ recursionLimit: 75 }),
- expect.anything(),
- );
- });
-
- it('should call resolveRecursionLimit with agentsEConfig and agent', async () => {
- const { resolveRecursionLimit } = require('@librechat/api');
- const { getAgent } = require('~/models');
- const mockAgent = { id: 'agent-123', name: 'Test', recursion_limit: 200 };
- getAgent.mockResolvedValueOnce(mockAgent);
-
- req.config = {
- endpoints: {
- agents: { recursionLimit: 100, maxRecursionLimit: 150, allowedProviders: [] },
- },
- };
-
- await OpenAIChatCompletionController(req, res);
-
- expect(resolveRecursionLimit).toHaveBeenCalledWith(req.config.endpoints.agents, mockAgent);
- });
- });
});
diff --git a/api/server/controllers/agents/__tests__/responses.unit.spec.js b/api/server/controllers/agents/__tests__/responses.unit.spec.js
index 26f5f5d30b..e16ca394b2 100644
--- a/api/server/controllers/agents/__tests__/responses.unit.spec.js
+++ b/api/server/controllers/agents/__tests__/responses.unit.spec.js
@@ -101,33 +101,37 @@ jest.mock('~/server/services/ToolService', () => ({
loadToolsForExecution: jest.fn().mockResolvedValue([]),
}));
-const mockGetMultiplier = jest.fn().mockReturnValue(1);
-const mockGetCacheMultiplier = jest.fn().mockReturnValue(null);
+jest.mock('~/models/spendTokens', () => ({
+ spendTokens: mockSpendTokens,
+ spendStructuredTokens: mockSpendStructuredTokens,
+}));
-jest.mock('~/server/controllers/agents/callbacks', () => {
- const noop = { handle: jest.fn() };
- return {
- createToolEndCallback: jest.fn().mockReturnValue(jest.fn()),
- createResponsesToolEndCallback: jest.fn().mockReturnValue(jest.fn()),
- markSummarizationUsage: jest.fn().mockImplementation((usage) => usage),
- agentLogHandlerObj: noop,
- buildSummarizationHandlers: jest.fn().mockReturnValue({
- on_summarize_start: noop,
- on_summarize_delta: noop,
- on_summarize_complete: noop,
- }),
- };
-});
+jest.mock('~/server/controllers/agents/callbacks', () => ({
+ createToolEndCallback: jest.fn().mockReturnValue(jest.fn()),
+ createResponsesToolEndCallback: jest.fn().mockReturnValue(jest.fn()),
+}));
jest.mock('~/server/services/PermissionService', () => ({
findAccessibleResources: jest.fn().mockResolvedValue([]),
}));
-const mockUpdateBalance = jest.fn().mockResolvedValue({});
-const mockBulkInsertTransactions = jest.fn().mockResolvedValue(undefined);
+jest.mock('~/models/Conversation', () => ({
+ getConvoFiles: jest.fn().mockResolvedValue([]),
+ saveConvo: jest.fn().mockResolvedValue({}),
+ getConvo: jest.fn().mockResolvedValue(null),
+}));
+
+jest.mock('~/models/Agent', () => ({
+ getAgent: jest.fn().mockResolvedValue({
+ id: 'agent-123',
+ name: 'Test Agent',
+ provider: 'anthropic',
+ model_parameters: { model: 'claude-3' },
+ }),
+ getAgents: jest.fn().mockResolvedValue([]),
+}));
jest.mock('~/models', () => ({
- getAgent: jest.fn().mockResolvedValue({ id: 'agent-123', name: 'Test Agent' }),
getFiles: jest.fn(),
getUserKey: jest.fn(),
getMessages: jest.fn().mockResolvedValue([]),
@@ -137,15 +141,6 @@ jest.mock('~/models', () => ({
getUserCodeFiles: jest.fn(),
getToolFilesByIds: jest.fn(),
getCodeGeneratedFiles: jest.fn(),
- updateBalance: mockUpdateBalance,
- bulkInsertTransactions: mockBulkInsertTransactions,
- spendTokens: mockSpendTokens,
- spendStructuredTokens: mockSpendStructuredTokens,
- getMultiplier: mockGetMultiplier,
- getCacheMultiplier: mockGetCacheMultiplier,
- getConvoFiles: jest.fn().mockResolvedValue([]),
- saveConvo: jest.fn().mockResolvedValue({}),
- getConvo: jest.fn().mockResolvedValue(null),
}));
describe('createResponse controller', () => {
@@ -183,117 +178,13 @@ describe('createResponse controller', () => {
};
});
- describe('conversation ownership validation', () => {
- it('should skip ownership check when previous_response_id is not provided', async () => {
- const { getConvo } = require('~/models');
- await createResponse(req, res);
- expect(getConvo).not.toHaveBeenCalled();
- });
-
- it('should return 400 when previous_response_id is not a string', async () => {
- const { validateResponseRequest, sendResponsesErrorResponse } = require('@librechat/api');
- validateResponseRequest.mockReturnValueOnce({
- request: {
- model: 'agent-123',
- input: 'Hello',
- stream: false,
- previous_response_id: { $gt: '' },
- },
- });
-
- await createResponse(req, res);
- expect(sendResponsesErrorResponse).toHaveBeenCalledWith(
- res,
- 400,
- 'previous_response_id must be a string',
- 'invalid_request',
- );
- });
-
- it('should return 404 when conversation is not owned by user', async () => {
- const { validateResponseRequest, sendResponsesErrorResponse } = require('@librechat/api');
- const { getConvo } = require('~/models');
- validateResponseRequest.mockReturnValueOnce({
- request: {
- model: 'agent-123',
- input: 'Hello',
- stream: false,
- previous_response_id: 'resp_abc',
- },
- });
- getConvo.mockResolvedValueOnce(null);
-
- await createResponse(req, res);
- expect(getConvo).toHaveBeenCalledWith('user-123', 'resp_abc');
- expect(sendResponsesErrorResponse).toHaveBeenCalledWith(
- res,
- 404,
- 'Conversation not found',
- 'not_found',
- );
- });
-
- it('should proceed when conversation is owned by user', async () => {
- const { validateResponseRequest, sendResponsesErrorResponse } = require('@librechat/api');
- const { getConvo } = require('~/models');
- validateResponseRequest.mockReturnValueOnce({
- request: {
- model: 'agent-123',
- input: 'Hello',
- stream: false,
- previous_response_id: 'resp_abc',
- },
- });
- getConvo.mockResolvedValueOnce({ conversationId: 'resp_abc', user: 'user-123' });
-
- await createResponse(req, res);
- expect(getConvo).toHaveBeenCalledWith('user-123', 'resp_abc');
- expect(sendResponsesErrorResponse).not.toHaveBeenCalledWith(
- res,
- 404,
- expect.any(String),
- expect.any(String),
- );
- });
-
- it('should return 500 when getConvo throws a DB error', async () => {
- const { validateResponseRequest, sendResponsesErrorResponse } = require('@librechat/api');
- const { getConvo } = require('~/models');
- validateResponseRequest.mockReturnValueOnce({
- request: {
- model: 'agent-123',
- input: 'Hello',
- stream: false,
- previous_response_id: 'resp_abc',
- },
- });
- getConvo.mockRejectedValueOnce(new Error('DB connection failed'));
-
- await createResponse(req, res);
- expect(sendResponsesErrorResponse).toHaveBeenCalledWith(
- res,
- 500,
- expect.any(String),
- expect.any(String),
- );
- });
- });
-
describe('token usage recording - non-streaming', () => {
it('should call recordCollectedUsage after successful non-streaming completion', async () => {
await createResponse(req, res);
expect(mockRecordCollectedUsage).toHaveBeenCalledTimes(1);
expect(mockRecordCollectedUsage).toHaveBeenCalledWith(
- {
- spendTokens: mockSpendTokens,
- spendStructuredTokens: mockSpendStructuredTokens,
- pricing: { getMultiplier: mockGetMultiplier, getCacheMultiplier: mockGetCacheMultiplier },
- bulkWriteOps: {
- insertMany: mockBulkInsertTransactions,
- updateBalance: mockUpdateBalance,
- },
- },
+ { spendTokens: mockSpendTokens, spendStructuredTokens: mockSpendStructuredTokens },
expect.objectContaining({
user: 'user-123',
conversationId: expect.any(String),
@@ -318,18 +209,12 @@ describe('createResponse controller', () => {
);
});
- it('should pass spendTokens, spendStructuredTokens, pricing, and bulkWriteOps as dependencies', async () => {
+ it('should pass spendTokens and spendStructuredTokens as dependencies', async () => {
await createResponse(req, res);
const [deps] = mockRecordCollectedUsage.mock.calls[0];
expect(deps).toHaveProperty('spendTokens', mockSpendTokens);
expect(deps).toHaveProperty('spendStructuredTokens', mockSpendStructuredTokens);
- expect(deps).toHaveProperty('pricing');
- expect(deps.pricing).toHaveProperty('getMultiplier', mockGetMultiplier);
- expect(deps.pricing).toHaveProperty('getCacheMultiplier', mockGetCacheMultiplier);
- expect(deps).toHaveProperty('bulkWriteOps');
- expect(deps.bulkWriteOps).toHaveProperty('insertMany', mockBulkInsertTransactions);
- expect(deps.bulkWriteOps).toHaveProperty('updateBalance', mockUpdateBalance);
});
it('should include model from primaryConfig in recordCollectedUsage params', async () => {
@@ -359,15 +244,7 @@ describe('createResponse controller', () => {
expect(mockRecordCollectedUsage).toHaveBeenCalledTimes(1);
expect(mockRecordCollectedUsage).toHaveBeenCalledWith(
- {
- spendTokens: mockSpendTokens,
- spendStructuredTokens: mockSpendStructuredTokens,
- pricing: { getMultiplier: mockGetMultiplier, getCacheMultiplier: mockGetCacheMultiplier },
- bulkWriteOps: {
- insertMany: mockBulkInsertTransactions,
- updateBalance: mockUpdateBalance,
- },
- },
+ { spendTokens: mockSpendTokens, spendStructuredTokens: mockSpendStructuredTokens },
expect.objectContaining({
user: 'user-123',
context: 'message',
@@ -380,7 +257,28 @@ describe('createResponse controller', () => {
it('should collect usage from on_chat_model_end events', async () => {
const api = require('@librechat/api');
+ let capturedOnChatModelEnd;
+ api.createAggregatorEventHandlers.mockImplementation(() => {
+ return {
+ on_message_delta: { handle: jest.fn() },
+ on_reasoning_delta: { handle: jest.fn() },
+ on_run_step: { handle: jest.fn() },
+ on_run_step_delta: { handle: jest.fn() },
+ on_chat_model_end: {
+ handle: jest.fn((event, data) => {
+ if (capturedOnChatModelEnd) {
+ capturedOnChatModelEnd(event, data);
+ }
+ }),
+ },
+ };
+ });
+
api.createRun.mockImplementation(async ({ customHandlers }) => {
+ capturedOnChatModelEnd = (event, data) => {
+ customHandlers.on_chat_model_end.handle(event, data);
+ };
+
return {
processStream: jest.fn().mockImplementation(async () => {
customHandlers.on_chat_model_end.handle('on_chat_model_end', {
@@ -397,6 +295,7 @@ describe('createResponse controller', () => {
});
await createResponse(req, res);
+
expect(mockRecordCollectedUsage).toHaveBeenCalledWith(
expect.any(Object),
expect.objectContaining({
diff --git a/api/server/controllers/agents/__tests__/v1.duplicate-actions.spec.js b/api/server/controllers/agents/__tests__/v1.duplicate-actions.spec.js
deleted file mode 100644
index cc298bd03a..0000000000
--- a/api/server/controllers/agents/__tests__/v1.duplicate-actions.spec.js
+++ /dev/null
@@ -1,159 +0,0 @@
-jest.mock('~/server/services/PermissionService', () => ({
- findPubliclyAccessibleResources: jest.fn(),
- findAccessibleResources: jest.fn(),
- hasPublicPermission: jest.fn(),
- grantPermission: jest.fn().mockResolvedValue({}),
-}));
-
-jest.mock('~/server/services/Config', () => ({
- getCachedTools: jest.fn(),
- getMCPServerTools: jest.fn(),
-}));
-
-const mongoose = require('mongoose');
-const { actionDelimiter } = require('librechat-data-provider');
-const { agentSchema, actionSchema } = require('@librechat/data-schemas');
-const { MongoMemoryServer } = require('mongodb-memory-server');
-const { duplicateAgent } = require('../v1');
-
-let mongoServer;
-
-beforeAll(async () => {
- mongoServer = await MongoMemoryServer.create();
- const mongoUri = mongoServer.getUri();
- if (!mongoose.models.Agent) {
- mongoose.model('Agent', agentSchema);
- }
- if (!mongoose.models.Action) {
- mongoose.model('Action', actionSchema);
- }
- await mongoose.connect(mongoUri);
-}, 20000);
-
-afterAll(async () => {
- await mongoose.disconnect();
- await mongoServer.stop();
-});
-
-beforeEach(async () => {
- await mongoose.models.Agent.deleteMany({});
- await mongoose.models.Action.deleteMany({});
-});
-
-describe('duplicateAgentHandler — action domain extraction', () => {
- it('builds duplicated action entries using metadata.domain, not action_id', async () => {
- const userId = new mongoose.Types.ObjectId();
- const originalAgentId = `agent_original`;
-
- const agent = await mongoose.models.Agent.create({
- id: originalAgentId,
- name: 'Test Agent',
- author: userId.toString(),
- provider: 'openai',
- model: 'gpt-4',
- tools: [],
- actions: [`api.example.com${actionDelimiter}act_original`],
- versions: [{ name: 'Test Agent', createdAt: new Date(), updatedAt: new Date() }],
- });
-
- await mongoose.models.Action.create({
- user: userId,
- action_id: 'act_original',
- agent_id: originalAgentId,
- metadata: { domain: 'api.example.com' },
- });
-
- const req = {
- params: { id: agent.id },
- user: { id: userId.toString() },
- };
- const res = {
- status: jest.fn().mockReturnThis(),
- json: jest.fn(),
- };
-
- await duplicateAgent(req, res);
-
- expect(res.status).toHaveBeenCalledWith(201);
-
- const { agent: newAgent, actions: newActions } = res.json.mock.calls[0][0];
-
- expect(newAgent.id).not.toBe(originalAgentId);
- expect(String(newAgent.author)).toBe(userId.toString());
- expect(newActions).toHaveLength(1);
- expect(newActions[0].metadata.domain).toBe('api.example.com');
- expect(newActions[0].agent_id).toBe(newAgent.id);
-
- for (const actionEntry of newAgent.actions) {
- const [domain, actionId] = actionEntry.split(actionDelimiter);
- expect(domain).toBe('api.example.com');
- expect(actionId).toBeTruthy();
- expect(actionId).not.toBe('act_original');
- }
-
- const allActions = await mongoose.models.Action.find({}).lean();
- expect(allActions).toHaveLength(2);
-
- const originalAction = allActions.find((a) => a.action_id === 'act_original');
- expect(originalAction.agent_id).toBe(originalAgentId);
-
- const duplicatedAction = allActions.find((a) => a.action_id !== 'act_original');
- expect(duplicatedAction.agent_id).toBe(newAgent.id);
- expect(duplicatedAction.metadata.domain).toBe('api.example.com');
- });
-
- it('strips sensitive metadata fields from duplicated actions', async () => {
- const userId = new mongoose.Types.ObjectId();
- const originalAgentId = 'agent_sensitive';
-
- await mongoose.models.Agent.create({
- id: originalAgentId,
- name: 'Sensitive Agent',
- author: userId.toString(),
- provider: 'openai',
- model: 'gpt-4',
- tools: [],
- actions: [`secure.api.com${actionDelimiter}act_secret`],
- versions: [{ name: 'Sensitive Agent', createdAt: new Date(), updatedAt: new Date() }],
- });
-
- await mongoose.models.Action.create({
- user: userId,
- action_id: 'act_secret',
- agent_id: originalAgentId,
- metadata: {
- domain: 'secure.api.com',
- api_key: 'sk-secret-key-12345',
- oauth_client_id: 'client_id_xyz',
- oauth_client_secret: 'client_secret_xyz',
- },
- });
-
- const req = {
- params: { id: originalAgentId },
- user: { id: userId.toString() },
- };
- const res = {
- status: jest.fn().mockReturnThis(),
- json: jest.fn(),
- };
-
- await duplicateAgent(req, res);
-
- expect(res.status).toHaveBeenCalledWith(201);
-
- const duplicatedAction = await mongoose.models.Action.findOne({
- agent_id: { $ne: originalAgentId },
- }).lean();
-
- expect(duplicatedAction.metadata.domain).toBe('secure.api.com');
- expect(duplicatedAction.metadata.api_key).toBeUndefined();
- expect(duplicatedAction.metadata.oauth_client_id).toBeUndefined();
- expect(duplicatedAction.metadata.oauth_client_secret).toBeUndefined();
-
- const originalAction = await mongoose.models.Action.findOne({
- action_id: 'act_secret',
- }).lean();
- expect(originalAction.metadata.api_key).toBe('sk-secret-key-12345');
- });
-});
diff --git a/api/server/controllers/agents/__tests__/v1.spec.js b/api/server/controllers/agents/__tests__/v1.spec.js
index 39cf994fef..b7e7b67a22 100644
--- a/api/server/controllers/agents/__tests__/v1.spec.js
+++ b/api/server/controllers/agents/__tests__/v1.spec.js
@@ -1,8 +1,10 @@
const { duplicateAgent } = require('../v1');
-const { getAgent, createAgent, getActions } = require('~/models');
+const { getAgent, createAgent } = require('~/models/Agent');
+const { getActions } = require('~/models/Action');
const { nanoid } = require('nanoid');
-jest.mock('~/models');
+jest.mock('~/models/Agent');
+jest.mock('~/models/Action');
jest.mock('nanoid');
describe('duplicateAgent', () => {
diff --git a/api/server/controllers/agents/callbacks.js b/api/server/controllers/agents/callbacks.js
index 40fdf74212..0bb935795d 100644
--- a/api/server/controllers/agents/callbacks.js
+++ b/api/server/controllers/agents/callbacks.js
@@ -1,13 +1,7 @@
const { nanoid } = require('nanoid');
const { logger } = require('@librechat/data-schemas');
+const { Constants, EnvVar, GraphEvents, ToolEndHandler } = require('@librechat/agents');
const { Tools, StepTypes, FileContext, ErrorTypes } = require('librechat-data-provider');
-const {
- EnvVar,
- Constants,
- GraphEvents,
- GraphNodeKeys,
- ToolEndHandler,
-} = require('@librechat/agents');
const {
sendEvent,
GenerationJobManager,
@@ -77,9 +71,7 @@ class ModelEndHandler {
usage.model = modelName;
}
- const taggedUsage = markSummarizationUsage(usage, metadata);
-
- this.collectedUsage.push(taggedUsage);
+ this.collectedUsage.push(usage);
} catch (error) {
logger.error('Error handling model end event:', error);
return this.finalize(errorMessage);
@@ -141,7 +133,6 @@ function getDefaultHandlers({
collectedUsage,
streamId = null,
toolExecuteOptions = null,
- summarizationOptions = null,
}) {
if (!res || !aggregateContent) {
throw new Error(
@@ -254,37 +245,6 @@ function getDefaultHandlers({
handlers[GraphEvents.ON_TOOL_EXECUTE] = createToolExecuteHandler(toolExecuteOptions);
}
- if (summarizationOptions?.enabled !== false) {
- handlers[GraphEvents.ON_SUMMARIZE_START] = {
- handle: async (_event, data) => {
- await emitEvent(res, streamId, {
- event: GraphEvents.ON_SUMMARIZE_START,
- data,
- });
- },
- };
- handlers[GraphEvents.ON_SUMMARIZE_DELTA] = {
- handle: async (_event, data) => {
- aggregateContent({ event: GraphEvents.ON_SUMMARIZE_DELTA, data });
- await emitEvent(res, streamId, {
- event: GraphEvents.ON_SUMMARIZE_DELTA,
- data,
- });
- },
- };
- handlers[GraphEvents.ON_SUMMARIZE_COMPLETE] = {
- handle: async (_event, data) => {
- aggregateContent({ event: GraphEvents.ON_SUMMARIZE_COMPLETE, data });
- await emitEvent(res, streamId, {
- event: GraphEvents.ON_SUMMARIZE_COMPLETE,
- data,
- });
- },
- };
- }
-
- handlers[GraphEvents.ON_AGENT_LOG] = { handle: agentLogHandler };
-
return handlers;
}
@@ -708,62 +668,8 @@ function createResponsesToolEndCallback({ req, res, tracker, artifactPromises })
};
}
-const ALLOWED_LOG_LEVELS = new Set(['debug', 'info', 'warn', 'error']);
-
-function agentLogHandler(_event, data) {
- if (!data) {
- return;
- }
- const logFn = ALLOWED_LOG_LEVELS.has(data.level) ? logger[data.level] : logger.debug;
- const meta = typeof data.data === 'object' && data.data != null ? data.data : {};
- logFn(`[agents:${data.scope ?? 'unknown'}] ${data.message ?? ''}`, {
- ...meta,
- runId: data.runId,
- agentId: data.agentId,
- });
-}
-
-function markSummarizationUsage(usage, metadata) {
- const node = metadata?.langgraph_node;
- if (typeof node === 'string' && node.startsWith(GraphNodeKeys.SUMMARIZE)) {
- return { ...usage, usage_type: 'summarization' };
- }
- return usage;
-}
-
-const agentLogHandlerObj = { handle: agentLogHandler };
-
-/**
- * Builds the three summarization SSE event handlers.
- * In streaming mode, each event is forwarded to the client via `res.write`.
- * In non-streaming mode, the handlers are no-ops.
- * @param {{ isStreaming: boolean, res: import('express').Response }} opts
- */
-function buildSummarizationHandlers({ isStreaming, res }) {
- if (!isStreaming) {
- const noop = { handle: () => {} };
- return { on_summarize_start: noop, on_summarize_delta: noop, on_summarize_complete: noop };
- }
- const writeEvent = (name) => ({
- handle: async (_event, data) => {
- if (!res.writableEnded) {
- res.write(`event: ${name}\ndata: ${JSON.stringify(data)}\n\n`);
- }
- },
- });
- return {
- on_summarize_start: writeEvent('on_summarize_start'),
- on_summarize_delta: writeEvent('on_summarize_delta'),
- on_summarize_complete: writeEvent('on_summarize_complete'),
- };
-}
-
module.exports = {
- agentLogHandler,
- agentLogHandlerObj,
getDefaultHandlers,
createToolEndCallback,
- markSummarizationUsage,
- buildSummarizationHandlers,
createResponsesToolEndCallback,
};
diff --git a/api/server/controllers/agents/client.js b/api/server/controllers/agents/client.js
index 3c1f91bd60..49240a6b3b 100644
--- a/api/server/controllers/agents/client.js
+++ b/api/server/controllers/agents/client.js
@@ -3,31 +3,26 @@ const { logger } = require('@librechat/data-schemas');
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
const {
createRun,
- isEnabled,
+ Tokenizer,
checkAccess,
buildToolSet,
- logToolError,
sanitizeTitle,
+ logToolError,
payloadParser,
resolveHeaders,
createSafeUser,
initializeAgent,
getBalanceConfig,
- omitTitleOptions,
getProviderConfig,
+ omitTitleOptions,
memoryInstructions,
- createTokenCounter,
applyContextToAgent,
- recordCollectedUsage,
+ createTokenCounter,
GenerationJobManager,
getTransactionsConfig,
- resolveRecursionLimit,
createMemoryProcessor,
- loadAgent: loadAgentFn,
createMultiAgentMapper,
filterMalformedContentParts,
- countFormattedMessageTokens,
- hydrateMissingIndexTokenCounts,
} = require('@librechat/api');
const {
Callback,
@@ -48,17 +43,16 @@ const {
isEphemeralAgentId,
removeNullishValues,
} = require('librechat-data-provider');
-const { filterFilesByAgentAccess } = require('~/server/services/Files/permissions');
+const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { createContextHandlers } = require('~/app/clients/prompts');
-const { resolveConfigServers } = require('~/server/services/MCP');
-const { getMCPServerTools } = require('~/server/services/Config');
+const { getConvoFiles } = require('~/models/Conversation');
const BaseClient = require('~/app/clients/BaseClient');
+const { getRoleByName } = require('~/models/Role');
+const { loadAgent } = require('~/models/Agent');
const { getMCPManager } = require('~/config');
const db = require('~/models');
-const loadAgent = (params) => loadAgentFn(params, { getAgent: db.getAgent, getMCPServerTools });
-
class AgentClient extends BaseClient {
constructor(options = {}) {
super(null, options);
@@ -66,6 +60,9 @@ class AgentClient extends BaseClient {
* @type {string} */
this.clientName = EModelEndpoint.agents;
+ /** @type {'discard' | 'summarize'} */
+ this.contextStrategy = 'discard';
+
/** @deprecated @type {true} - Is a Chat Completion Request */
this.isChatCompletion = true;
@@ -217,6 +214,7 @@ class AgentClient extends BaseClient {
}))
: []),
];
+
if (this.options.attachments) {
const attachments = await this.options.attachments;
const latestMessage = orderedMessages[orderedMessages.length - 1];
@@ -243,11 +241,6 @@ class AgentClient extends BaseClient {
);
}
- /** @type {Record} */
- const canonicalTokenCountMap = {};
- /** @type {Record} */
- const tokenCountMap = {};
- let promptTokenTotal = 0;
const formattedMessages = orderedMessages.map((message, i) => {
const formattedMessage = formatMessage({
message,
@@ -267,14 +260,12 @@ class AgentClient extends BaseClient {
}
}
- const dbTokenCount = orderedMessages[i].tokenCount;
- const needsTokenCount = !dbTokenCount || message.fileContext;
+ const needsTokenCount =
+ (this.contextStrategy && !orderedMessages[i].tokenCount) || message.fileContext;
+ /* If tokens were never counted, or, is a Vision request and the message has files, count again */
if (needsTokenCount || (this.isVisionModel && (message.image_urls || message.files))) {
- orderedMessages[i].tokenCount = countFormattedMessageTokens(
- formattedMessage,
- this.getEncoding(),
- );
+ orderedMessages[i].tokenCount = this.getTokenCountForMessage(formattedMessage);
}
/* If message has files, calculate image token cost */
@@ -288,37 +279,17 @@ class AgentClient extends BaseClient {
if (file.metadata?.fileIdentifier) {
continue;
}
+ // orderedMessages[i].tokenCount += this.calculateImageTokenCost({
+ // width: file.width,
+ // height: file.height,
+ // detail: this.options.imageDetail ?? ImageDetail.auto,
+ // });
}
}
- const tokenCount = Number(orderedMessages[i].tokenCount);
- const normalizedTokenCount = Number.isFinite(tokenCount) && tokenCount > 0 ? tokenCount : 0;
- canonicalTokenCountMap[i] = normalizedTokenCount;
- promptTokenTotal += normalizedTokenCount;
-
- if (message.messageId) {
- tokenCountMap[message.messageId] = normalizedTokenCount;
- }
-
- if (isEnabled(process.env.AGENT_DEBUG_LOGGING)) {
- const role = message.isCreatedByUser ? 'user' : 'assistant';
- const hasSummary =
- Array.isArray(message.content) && message.content.some((p) => p && p.type === 'summary');
- const suffix = hasSummary ? '[S]' : '';
- const id = (message.messageId ?? message.id ?? '').slice(-8);
- const recalced = needsTokenCount ? orderedMessages[i].tokenCount : null;
- logger.debug(
- `[AgentClient] msg[${i}] ${role}${suffix} id=…${id} db=${dbTokenCount} needsRecount=${needsTokenCount} recalced=${recalced} tokens=${normalizedTokenCount}`,
- );
- }
-
return formattedMessage;
});
- payload = formattedMessages;
- messages = orderedMessages;
- promptTokens = promptTokenTotal;
-
/**
* Build shared run context - applies to ALL agents in the run.
* This includes: file context (latest message), augmented prompt (RAG), memory context.
@@ -348,20 +319,23 @@ class AgentClient extends BaseClient {
const sharedRunContext = sharedRunContextParts.join('\n\n');
- /** Preserve canonical pre-format token counts for all history entering graph formatting */
- this.indexTokenCountMap = canonicalTokenCountMap;
+ /** @type {Record | undefined} */
+ let tokenCountMap;
- /** Extract contextMeta from the parent response (second-to-last in ordered chain;
- * last is the current user message). Seeds the pruner's calibration EMA for this run. */
- const parentResponse =
- orderedMessages.length >= 2 ? orderedMessages[orderedMessages.length - 2] : undefined;
- if (parentResponse?.contextMeta && !parentResponse.isCreatedByUser) {
- this.contextMeta = parentResponse.contextMeta;
+ if (this.contextStrategy) {
+ ({ payload, promptTokens, tokenCountMap, messages } = await this.handleContextStrategy({
+ orderedMessages,
+ formattedMessages,
+ }));
+ }
+
+ for (let i = 0; i < messages.length; i++) {
+ this.indexTokenCountMap[i] = messages[i].tokenCount;
}
const result = {
- prompt: payload,
tokenCountMap,
+ prompt: payload,
promptTokens,
messages,
};
@@ -379,9 +353,6 @@ class AgentClient extends BaseClient {
*/
const ephemeralAgent = this.options.req.body.ephemeralAgent;
const mcpManager = getMCPManager();
-
- const configServers = await resolveConfigServers(this.options.req);
-
await Promise.all(
allAgents.map(({ agent, agentId }) =>
applyContextToAgent({
@@ -389,7 +360,6 @@ class AgentClient extends BaseClient {
agentId,
logger,
mcpManager,
- configServers,
sharedRunContext,
ephemeralAgent: agentId === this.options.agent.id ? ephemeralAgent : undefined,
}),
@@ -439,7 +409,7 @@ class AgentClient extends BaseClient {
user,
permissionType: PermissionTypes.MEMORIES,
permissions: [Permissions.USE],
- getRoleByName: db.getRoleByName,
+ getRoleByName,
});
if (!hasAccess) {
@@ -499,14 +469,13 @@ class AgentClient extends BaseClient {
},
},
{
+ getConvoFiles,
getFiles: db.getFiles,
getUserKey: db.getUserKey,
- getConvoFiles: db.getConvoFiles,
updateFilesUsage: db.updateFilesUsage,
getUserKeyValues: db.getUserKeyValues,
getToolFilesByIds: db.getToolFilesByIds,
getCodeGeneratedFiles: db.getCodeGeneratedFiles,
- filterFilesByAgentAccess,
},
);
@@ -655,29 +624,82 @@ class AgentClient extends BaseClient {
context = 'message',
collectedUsage = this.collectedUsage,
}) {
- const result = await recordCollectedUsage(
- {
- spendTokens: db.spendTokens,
- spendStructuredTokens: db.spendStructuredTokens,
- pricing: { getMultiplier: db.getMultiplier, getCacheMultiplier: db.getCacheMultiplier },
- bulkWriteOps: { insertMany: db.bulkInsertTransactions, updateBalance: db.updateBalance },
- },
- {
- user: this.user ?? this.options.req.user?.id,
- conversationId: this.conversationId,
- collectedUsage,
- model: model ?? this.model ?? this.options.agent.model_parameters.model,
+ if (!collectedUsage || !collectedUsage.length) {
+ return;
+ }
+ // Use first entry's input_tokens as the base input (represents initial user message context)
+ // Support both OpenAI format (input_token_details) and Anthropic format (cache_*_input_tokens)
+ const firstUsage = collectedUsage[0];
+ const input_tokens =
+ (firstUsage?.input_tokens || 0) +
+ (Number(firstUsage?.input_token_details?.cache_creation) ||
+ Number(firstUsage?.cache_creation_input_tokens) ||
+ 0) +
+ (Number(firstUsage?.input_token_details?.cache_read) ||
+ Number(firstUsage?.cache_read_input_tokens) ||
+ 0);
+
+ // Sum output_tokens directly from all entries - works for both sequential and parallel execution
+ // This avoids the incremental calculation that produced negative values for parallel agents
+ let total_output_tokens = 0;
+
+ for (const usage of collectedUsage) {
+ if (!usage) {
+ continue;
+ }
+
+ // Support both OpenAI format (input_token_details) and Anthropic format (cache_*_input_tokens)
+ const cache_creation =
+ Number(usage.input_token_details?.cache_creation) ||
+ Number(usage.cache_creation_input_tokens) ||
+ 0;
+ const cache_read =
+ Number(usage.input_token_details?.cache_read) || Number(usage.cache_read_input_tokens) || 0;
+
+ // Accumulate output tokens for the usage summary
+ total_output_tokens += Number(usage.output_tokens) || 0;
+
+ const txMetadata = {
context,
- messageId: this.responseMessageId,
balance,
transactions,
+ conversationId: this.conversationId,
+ user: this.user ?? this.options.req.user?.id,
endpointTokenConfig: this.options.endpointTokenConfig,
- },
- );
+ model: usage.model ?? model ?? this.model ?? this.options.agent.model_parameters.model,
+ };
- if (result) {
- this.usage = result;
+ if (cache_creation > 0 || cache_read > 0) {
+ spendStructuredTokens(txMetadata, {
+ promptTokens: {
+ input: usage.input_tokens,
+ write: cache_creation,
+ read: cache_read,
+ },
+ completionTokens: usage.output_tokens,
+ }).catch((err) => {
+ logger.error(
+ '[api/server/controllers/agents/client.js #recordCollectedUsage] Error spending structured tokens',
+ err,
+ );
+ });
+ continue;
+ }
+ spendTokens(txMetadata, {
+ promptTokens: usage.input_tokens,
+ completionTokens: usage.output_tokens,
+ }).catch((err) => {
+ logger.error(
+ '[api/server/controllers/agents/client.js #recordCollectedUsage] Error spending tokens',
+ err,
+ );
+ });
}
+
+ this.usage = {
+ input_tokens,
+ output_tokens: total_output_tokens,
+ };
}
/**
@@ -693,7 +715,39 @@ class AgentClient extends BaseClient {
* @returns {number}
*/
getTokenCountForResponse({ content }) {
- return countFormattedMessageTokens({ role: 'assistant', content }, this.getEncoding());
+ return this.getTokenCountForMessage({
+ role: 'assistant',
+ content,
+ });
+ }
+
+ /**
+ * Calculates the correct token count for the current user message based on the token count map and API usage.
+ * Edge case: If the calculation results in a negative value, it returns the original estimate.
+ * If revisiting a conversation with a chat history entirely composed of token estimates,
+ * the cumulative token count going forward should become more accurate as the conversation progresses.
+ * @param {Object} params - The parameters for the calculation.
+ * @param {Record} params.tokenCountMap - A map of message IDs to their token counts.
+ * @param {string} params.currentMessageId - The ID of the current message to calculate.
+ * @param {OpenAIUsageMetadata} params.usage - The usage object returned by the API.
+ * @returns {number} The correct token count for the current user message.
+ */
+ calculateCurrentTokenCount({ tokenCountMap, currentMessageId, usage }) {
+ const originalEstimate = tokenCountMap[currentMessageId] || 0;
+
+ if (!usage || typeof usage[this.inputTokensKey] !== 'number') {
+ return originalEstimate;
+ }
+
+ tokenCountMap[currentMessageId] = 0;
+ const totalTokensFromMap = Object.values(tokenCountMap).reduce((sum, count) => {
+ const numCount = Number(count);
+ return sum + (isNaN(numCount) ? 0 : numCount);
+ }, 0);
+ const totalInputTokens = usage[this.inputTokensKey] ?? 0;
+
+ const currentMessageTokens = totalInputTokens - totalTokensFromMap;
+ return currentMessageTokens > 0 ? currentMessageTokens : originalEstimate;
}
/**
@@ -734,41 +788,18 @@ class AgentClient extends BaseClient {
},
user: createSafeUser(this.options.req.user),
},
- recursionLimit: resolveRecursionLimit(agentsEConfig, this.options.agent),
+ recursionLimit: agentsEConfig?.recursionLimit ?? 50,
signal: abortController.signal,
streamMode: 'values',
version: 'v2',
};
const toolSet = buildToolSet(this.options.agent);
- const tokenCounter = createTokenCounter(this.getEncoding());
- let {
- messages: initialMessages,
- indexTokenCountMap,
- summary: initialSummary,
- boundaryTokenAdjustment,
- } = formatAgentMessages(payload, this.indexTokenCountMap, toolSet);
- if (boundaryTokenAdjustment) {
- logger.debug(
- `[AgentClient] Boundary token adjustment: ${boundaryTokenAdjustment.original} → ${boundaryTokenAdjustment.adjusted} (${boundaryTokenAdjustment.remainingChars}/${boundaryTokenAdjustment.totalChars} chars)`,
- );
- }
- if (indexTokenCountMap && isEnabled(process.env.AGENT_DEBUG_LOGGING)) {
- const entries = Object.entries(indexTokenCountMap);
- const perMsg = entries.map(([idx, count]) => {
- const msg = initialMessages[Number(idx)];
- const type = msg ? msg._getType() : '?';
- return `${idx}:${type}=${count}`;
- });
- logger.debug(
- `[AgentClient] Token map after format: [${perMsg.join(', ')}] (payload=${payload.length}, formatted=${initialMessages.length})`,
- );
- }
- indexTokenCountMap = hydrateMissingIndexTokenCounts({
- messages: initialMessages,
- indexTokenCountMap,
- tokenCounter,
- });
+ let { messages: initialMessages, indexTokenCountMap } = formatAgentMessages(
+ payload,
+ this.indexTokenCountMap,
+ toolSet,
+ );
/**
* @param {BaseMessage[]} messages
@@ -782,6 +813,17 @@ class AgentClient extends BaseClient {
agents.push(...this.agentConfigs.values());
}
+ if (agents[0].recursion_limit && typeof agents[0].recursion_limit === 'number') {
+ config.recursionLimit = agents[0].recursion_limit;
+ }
+
+ if (
+ agentsEConfig?.maxRecursionLimit &&
+ config.recursionLimit > agentsEConfig?.maxRecursionLimit
+ ) {
+ config.recursionLimit = agentsEConfig?.maxRecursionLimit;
+ }
+
// TODO: needs to be added as part of AgentContext initialization
// const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];
// const noSystemMessages = noSystemModelRegex.some((regex) =>
@@ -811,32 +853,16 @@ class AgentClient extends BaseClient {
memoryPromise = this.runMemory(messages);
- /** Seed calibration state from previous run if encoding matches */
- const currentEncoding = this.getEncoding();
- const prevMeta = this.contextMeta;
- const encodingMatch = prevMeta?.encoding === currentEncoding;
- const calibrationRatio =
- encodingMatch && prevMeta?.calibrationRatio > 0 ? prevMeta.calibrationRatio : undefined;
-
- if (prevMeta) {
- logger.debug(
- `[AgentClient] contextMeta from parent: ratio=${prevMeta.calibrationRatio}, encoding=${prevMeta.encoding}, current=${currentEncoding}, seeded=${calibrationRatio ?? 'none'}`,
- );
- }
-
run = await createRun({
agents,
messages,
indexTokenCountMap,
- initialSummary,
- calibrationRatio,
runId: this.responseMessageId,
signal: abortController.signal,
customHandlers: this.options.eventHandlers,
requestBody: config.configurable.requestBody,
user: createSafeUser(this.options.req?.user),
- summarizationConfig: appConfig?.summarization,
- tokenCounter,
+ tokenCounter: createTokenCounter(this.getEncoding()),
});
if (!run) {
@@ -865,11 +891,9 @@ class AgentClient extends BaseClient {
config.signal = null;
};
- const hideSequentialOutputs = config.configurable.hide_sequential_outputs;
await runAgents(initialMessages);
-
/** @deprecated Agent Chain */
- if (hideSequentialOutputs) {
+ if (config.configurable.hide_sequential_outputs) {
this.contentParts = this.contentParts.filter((part, index) => {
// Include parts that are either:
// 1. At or after the finalContentStart index
@@ -898,18 +922,6 @@ class AgentClient extends BaseClient {
});
}
} finally {
- /** Capture calibration state from the run for persistence on the response message.
- * Runs in finally so values are captured even on abort. */
- const ratio = this.run?.getCalibrationRatio() ?? 0;
- if (ratio > 0 && ratio !== 1) {
- this.contextMeta = {
- calibrationRatio: Math.round(ratio * 1000) / 1000,
- encoding: this.getEncoding(),
- };
- } else {
- this.contextMeta = undefined;
- }
-
try {
const attachments = await this.awaitMemoryWithTimeout(memoryPromise);
if (attachments && attachments.length > 0) {
@@ -1095,7 +1107,6 @@ class AgentClient extends BaseClient {
titlePrompt: endpointConfig?.titlePrompt,
titlePromptTemplate: endpointConfig?.titlePromptTemplate,
chainOptions: {
- runName: 'TitleRun',
signal: abortController.signal,
callbacks: [
{
@@ -1136,7 +1147,6 @@ class AgentClient extends BaseClient {
model: clientOptions.model,
balance: balanceConfig,
transactions: transactionsConfig,
- messageId: this.responseMessageId,
}).catch((err) => {
logger.error(
'[api/server/controllers/agents/client.js #titleConvo] Error recording collected usage',
@@ -1170,12 +1180,11 @@ class AgentClient extends BaseClient {
context = 'message',
}) {
try {
- await db.spendTokens(
+ await spendTokens(
{
model,
context,
balance,
- messageId: this.responseMessageId,
conversationId: this.conversationId,
user: this.user ?? this.options.req.user?.id,
endpointTokenConfig: this.options.endpointTokenConfig,
@@ -1189,12 +1198,11 @@ class AgentClient extends BaseClient {
'reasoning_tokens' in usage &&
typeof usage.reasoning_tokens === 'number'
) {
- await db.spendTokens(
+ await spendTokens(
{
model,
balance,
context: 'reasoning',
- messageId: this.responseMessageId,
conversationId: this.conversationId,
user: this.user ?? this.options.req.user?.id,
endpointTokenConfig: this.options.endpointTokenConfig,
@@ -1210,13 +1218,19 @@ class AgentClient extends BaseClient {
}
}
- /** Anthropic Claude models use a distinct BPE tokenizer; all others default to o200k_base. */
getEncoding() {
- if (this.model && this.model.toLowerCase().includes('claude')) {
- return 'claude';
- }
return 'o200k_base';
}
+
+ /**
+ * Returns the token count of a given text. It also checks and resets the tokenizers if necessary.
+ * @param {string} text - The text to get the token count for.
+ * @returns {number} The token count of the given text.
+ */
+ getTokenCount(text) {
+ const encoding = this.getEncoding();
+ return Tokenizer.getTokenCount(text, encoding);
+ }
}
module.exports = AgentClient;
diff --git a/api/server/controllers/agents/client.test.js b/api/server/controllers/agents/client.test.js
index 1595f652f7..9dd3567047 100644
--- a/api/server/controllers/agents/client.test.js
+++ b/api/server/controllers/agents/client.test.js
@@ -15,19 +15,13 @@ jest.mock('@librechat/api', () => ({
checkAccess: jest.fn(),
initializeAgent: jest.fn(),
createMemoryProcessor: jest.fn(),
+}));
+
+jest.mock('~/models/Agent', () => ({
loadAgent: jest.fn(),
}));
-jest.mock('~/server/services/Config', () => ({
- getMCPServerTools: jest.fn(),
-}));
-
-jest.mock('~/server/services/MCP', () => ({
- resolveConfigServers: jest.fn().mockResolvedValue({}),
-}));
-
-jest.mock('~/models', () => ({
- getAgent: jest.fn(),
+jest.mock('~/models/Role', () => ({
getRoleByName: jest.fn(),
}));
@@ -269,7 +263,6 @@ describe('AgentClient - titleConvo', () => {
transactions: {
enabled: true,
},
- messageId: 'response-123',
});
});
@@ -1319,7 +1312,7 @@ describe('AgentClient - titleConvo', () => {
});
// Verify formatInstructionsForContext was called with correct server names
- expect(mockFormatInstructions).toHaveBeenCalledWith(['server1', 'server2'], {});
+ expect(mockFormatInstructions).toHaveBeenCalledWith(['server1', 'server2']);
// Verify the instructions do NOT contain [object Promise]
expect(client.options.agent.instructions).not.toContain('[object Promise]');
@@ -1359,10 +1352,10 @@ describe('AgentClient - titleConvo', () => {
});
// Verify formatInstructionsForContext was called with ephemeral server names
- expect(mockFormatInstructions).toHaveBeenCalledWith(
- ['ephemeral-server1', 'ephemeral-server2'],
- {},
- );
+ expect(mockFormatInstructions).toHaveBeenCalledWith([
+ 'ephemeral-server1',
+ 'ephemeral-server2',
+ ]);
// Verify no [object Promise] in instructions
expect(client.options.agent.instructions).not.toContain('[object Promise]');
@@ -1822,7 +1815,7 @@ describe('AgentClient - titleConvo', () => {
/** Traversal stops at msg-2 (has summary), so we get msg-4 -> msg-3 -> msg-2 */
expect(result).toHaveLength(3);
- expect(result[0].content).toEqual([{ type: 'text', text: 'Summary of conversation' }]);
+ expect(result[0].text).toBe('Summary of conversation');
expect(result[0].role).toBe('system');
expect(result[0].mapped).toBe(true);
expect(result[1].mapped).toBe(true);
@@ -2144,7 +2137,7 @@ describe('AgentClient - titleConvo', () => {
};
mockCheckAccess = require('@librechat/api').checkAccess;
- mockLoadAgent = require('@librechat/api').loadAgent;
+ mockLoadAgent = require('~/models/Agent').loadAgent;
mockInitializeAgent = require('@librechat/api').initializeAgent;
mockCreateMemoryProcessor = require('@librechat/api').createMemoryProcessor;
});
@@ -2201,7 +2194,6 @@ describe('AgentClient - titleConvo', () => {
expect.objectContaining({
agent_id: differentAgentId,
}),
- expect.any(Object),
);
expect(mockInitializeAgent).toHaveBeenCalledWith(
expect.objectContaining({
diff --git a/api/server/controllers/agents/errors.js b/api/server/controllers/agents/errors.js
index b16ce75591..54b296a5d2 100644
--- a/api/server/controllers/agents/errors.js
+++ b/api/server/controllers/agents/errors.js
@@ -3,8 +3,8 @@ const { logger } = require('@librechat/data-schemas');
const { CacheKeys, ViolationTypes } = require('librechat-data-provider');
const { sendResponse } = require('~/server/middleware/error');
const { recordUsage } = require('~/server/services/Threads');
+const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
-const { getConvo } = require('~/models');
/**
* @typedef {Object} ErrorHandlerContext
diff --git a/api/server/controllers/agents/filterAuthorizedTools.spec.js b/api/server/controllers/agents/filterAuthorizedTools.spec.js
deleted file mode 100644
index e6b41aef16..0000000000
--- a/api/server/controllers/agents/filterAuthorizedTools.spec.js
+++ /dev/null
@@ -1,692 +0,0 @@
-const mongoose = require('mongoose');
-const { v4: uuidv4 } = require('uuid');
-const { Constants } = require('librechat-data-provider');
-const { agentSchema } = require('@librechat/data-schemas');
-const { MongoMemoryServer } = require('mongodb-memory-server');
-
-const d = Constants.mcp_delimiter;
-
-const mockGetAllServerConfigs = jest.fn();
-
-jest.mock('~/server/services/Config', () => ({
- getCachedTools: jest.fn().mockResolvedValue({
- web_search: true,
- execute_code: true,
- file_search: true,
- }),
-}));
-
-jest.mock('~/config', () => ({
- getMCPServersRegistry: jest.fn(() => ({
- getAllServerConfigs: mockGetAllServerConfigs,
- })),
-}));
-
-jest.mock('~/server/services/MCP', () => ({
- resolveConfigServers: jest.fn().mockResolvedValue({}),
-}));
-
-jest.mock('~/server/services/Files/strategies', () => ({
- getStrategyFunctions: jest.fn(),
-}));
-
-jest.mock('~/server/services/Files/images/avatar', () => ({
- resizeAvatar: jest.fn(),
-}));
-
-jest.mock('~/server/services/Files/process', () => ({
- filterFile: jest.fn(),
-}));
-
-jest.mock('~/server/services/PermissionService', () => ({
- findAccessibleResources: jest.fn().mockResolvedValue([]),
- findPubliclyAccessibleResources: jest.fn().mockResolvedValue([]),
- grantPermission: jest.fn(),
- hasPublicPermission: jest.fn().mockResolvedValue(false),
- checkPermission: jest.fn().mockResolvedValue(true),
-}));
-
-jest.mock('~/models', () => {
- const mongoose = require('mongoose');
- const { createModels, createMethods } = require('@librechat/data-schemas');
- createModels(mongoose);
- const methods = createMethods(mongoose);
- return {
- ...methods,
- getCategoriesWithCounts: jest.fn(),
- deleteFileByFilter: jest.fn(),
- };
-});
-
-jest.mock('~/cache', () => ({
- getLogStores: jest.fn(() => ({
- get: jest.fn(),
- set: jest.fn(),
- delete: jest.fn(),
- })),
-}));
-
-const {
- filterAuthorizedTools,
- createAgent: createAgentHandler,
- updateAgent: updateAgentHandler,
- duplicateAgent: duplicateAgentHandler,
- revertAgentVersion: revertAgentVersionHandler,
-} = require('./v1');
-
-const { getMCPServersRegistry } = require('~/config');
-
-let Agent;
-
-describe('MCP Tool Authorization', () => {
- let mongoServer;
- let mockReq;
- let mockRes;
-
- beforeAll(async () => {
- mongoServer = await MongoMemoryServer.create();
- const mongoUri = mongoServer.getUri();
- await mongoose.connect(mongoUri);
- Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
- }, 20000);
-
- afterAll(async () => {
- await mongoose.disconnect();
- await mongoServer.stop();
- });
-
- beforeEach(async () => {
- await Agent.deleteMany({});
- jest.clearAllMocks();
-
- getMCPServersRegistry.mockImplementation(() => ({
- getAllServerConfigs: mockGetAllServerConfigs,
- }));
- mockGetAllServerConfigs.mockResolvedValue({
- authorizedServer: { type: 'sse', url: 'https://authorized.example.com' },
- anotherServer: { type: 'sse', url: 'https://another.example.com' },
- });
-
- mockReq = {
- user: {
- id: new mongoose.Types.ObjectId().toString(),
- role: 'USER',
- },
- body: {},
- params: {},
- query: {},
- app: { locals: { fileStrategy: 'local' } },
- };
-
- mockRes = {
- status: jest.fn().mockReturnThis(),
- json: jest.fn().mockReturnThis(),
- };
- });
-
- describe('filterAuthorizedTools', () => {
- const availableTools = { web_search: true, custom_tool: true };
- const userId = 'test-user-123';
-
- test('should keep authorized MCP tools and strip unauthorized ones', async () => {
- const result = await filterAuthorizedTools({
- tools: [`toolA${d}authorizedServer`, `toolB${d}forbiddenServer`, 'web_search'],
- userId,
- availableTools,
- });
-
- expect(result).toContain(`toolA${d}authorizedServer`);
- expect(result).toContain('web_search');
- expect(result).not.toContain(`toolB${d}forbiddenServer`);
- });
-
- test('should keep system tools without querying MCP registry', async () => {
- const result = await filterAuthorizedTools({
- tools: ['execute_code', 'file_search', 'web_search'],
- userId,
- availableTools: {},
- });
-
- expect(result).toEqual(['execute_code', 'file_search', 'web_search']);
- expect(mockGetAllServerConfigs).not.toHaveBeenCalled();
- });
-
- test('should not query MCP registry when no MCP tools are present', async () => {
- const result = await filterAuthorizedTools({
- tools: ['web_search', 'custom_tool'],
- userId,
- availableTools,
- });
-
- expect(result).toEqual(['web_search', 'custom_tool']);
- expect(mockGetAllServerConfigs).not.toHaveBeenCalled();
- });
-
- test('should filter all MCP tools when registry is uninitialized', async () => {
- getMCPServersRegistry.mockImplementation(() => {
- throw new Error('MCPServersRegistry has not been initialized.');
- });
-
- const result = await filterAuthorizedTools({
- tools: [`toolA${d}someServer`, 'web_search'],
- userId,
- availableTools,
- });
-
- expect(result).toEqual(['web_search']);
- expect(result).not.toContain(`toolA${d}someServer`);
- });
-
- test('should handle mixed authorized and unauthorized MCP tools', async () => {
- const result = await filterAuthorizedTools({
- tools: [
- 'web_search',
- `search${d}authorizedServer`,
- `attack${d}victimServer`,
- 'execute_code',
- `list${d}anotherServer`,
- `steal${d}nonexistent`,
- ],
- userId,
- availableTools,
- });
-
- expect(result).toEqual([
- 'web_search',
- `search${d}authorizedServer`,
- 'execute_code',
- `list${d}anotherServer`,
- ]);
- });
-
- test('should handle empty tools array', async () => {
- const result = await filterAuthorizedTools({
- tools: [],
- userId,
- availableTools,
- });
-
- expect(result).toEqual([]);
- expect(mockGetAllServerConfigs).not.toHaveBeenCalled();
- });
-
- test('should handle null/undefined tool entries gracefully', async () => {
- const result = await filterAuthorizedTools({
- tools: [null, undefined, '', 'web_search'],
- userId,
- availableTools,
- });
-
- expect(result).toEqual(['web_search']);
- });
-
- test('should call getAllServerConfigs with the correct userId', async () => {
- await filterAuthorizedTools({
- tools: [`tool${d}authorizedServer`],
- userId: 'specific-user-id',
- availableTools,
- });
-
- expect(mockGetAllServerConfigs).toHaveBeenCalledWith('specific-user-id', undefined);
- });
-
- test('should pass configServers to getAllServerConfigs and allow config-override servers', async () => {
- const configServers = {
- 'config-override-server': { type: 'sse', url: 'https://override.example.com' },
- };
- mockGetAllServerConfigs.mockResolvedValue({
- 'config-override-server': configServers['config-override-server'],
- });
-
- const result = await filterAuthorizedTools({
- tools: [`tool${d}config-override-server`, `tool${d}unauthorizedServer`],
- userId,
- availableTools,
- configServers,
- });
-
- expect(mockGetAllServerConfigs).toHaveBeenCalledWith(userId, configServers);
- expect(result).toContain(`tool${d}config-override-server`);
- expect(result).not.toContain(`tool${d}unauthorizedServer`);
- });
-
- test('should only call getAllServerConfigs once even with multiple MCP tools', async () => {
- await filterAuthorizedTools({
- tools: [`tool1${d}authorizedServer`, `tool2${d}anotherServer`, `tool3${d}unknownServer`],
- userId,
- availableTools,
- });
-
- expect(mockGetAllServerConfigs).toHaveBeenCalledTimes(1);
- });
-
- test('should preserve existing MCP tools when registry is unavailable', async () => {
- getMCPServersRegistry.mockImplementation(() => {
- throw new Error('MCPServersRegistry has not been initialized.');
- });
-
- const existingTools = [`toolA${d}serverA`, `toolB${d}serverB`];
-
- const result = await filterAuthorizedTools({
- tools: [...existingTools, `newTool${d}unknownServer`, 'web_search'],
- userId,
- availableTools,
- existingTools,
- });
-
- expect(result).toContain(`toolA${d}serverA`);
- expect(result).toContain(`toolB${d}serverB`);
- expect(result).toContain('web_search');
- expect(result).not.toContain(`newTool${d}unknownServer`);
- });
-
- test('should still reject all MCP tools when registry is unavailable and no existingTools', async () => {
- getMCPServersRegistry.mockImplementation(() => {
- throw new Error('MCPServersRegistry has not been initialized.');
- });
-
- const result = await filterAuthorizedTools({
- tools: [`toolA${d}serverA`, 'web_search'],
- userId,
- availableTools,
- });
-
- expect(result).toEqual(['web_search']);
- });
-
- test('should not preserve malformed existing tools when registry is unavailable', async () => {
- getMCPServersRegistry.mockImplementation(() => {
- throw new Error('MCPServersRegistry has not been initialized.');
- });
-
- const malformedTool = `a${d}b${d}c`;
- const result = await filterAuthorizedTools({
- tools: [malformedTool, `legit${d}serverA`, 'web_search'],
- userId,
- availableTools,
- existingTools: [malformedTool, `legit${d}serverA`],
- });
-
- expect(result).toContain(`legit${d}serverA`);
- expect(result).toContain('web_search');
- expect(result).not.toContain(malformedTool);
- });
-
- test('should reject malformed MCP tool keys with multiple delimiters', async () => {
- const result = await filterAuthorizedTools({
- tools: [
- `attack${d}victimServer${d}authorizedServer`,
- `legit${d}authorizedServer`,
- `a${d}b${d}c${d}d`,
- 'web_search',
- ],
- userId,
- availableTools,
- });
-
- expect(result).toEqual([`legit${d}authorizedServer`, 'web_search']);
- expect(result).not.toContainEqual(expect.stringContaining('victimServer'));
- expect(result).not.toContainEqual(expect.stringContaining(`a${d}b`));
- });
- });
-
- describe('createAgentHandler - MCP tool authorization', () => {
- test('should strip unauthorized MCP tools on create', async () => {
- mockReq.body = {
- provider: 'openai',
- model: 'gpt-4',
- name: 'MCP Test Agent',
- tools: ['web_search', `validTool${d}authorizedServer`, `attack${d}forbiddenServer`],
- };
-
- await createAgentHandler(mockReq, mockRes);
-
- expect(mockRes.status).toHaveBeenCalledWith(201);
- const agent = mockRes.json.mock.calls[0][0];
- expect(agent.tools).toContain('web_search');
- expect(agent.tools).toContain(`validTool${d}authorizedServer`);
- expect(agent.tools).not.toContain(`attack${d}forbiddenServer`);
- });
-
- test('should not 500 when MCP registry is uninitialized', async () => {
- getMCPServersRegistry.mockImplementation(() => {
- throw new Error('MCPServersRegistry has not been initialized.');
- });
-
- mockReq.body = {
- provider: 'openai',
- model: 'gpt-4',
- name: 'MCP Uninitialized Test',
- tools: [`tool${d}someServer`, 'web_search'],
- };
-
- await createAgentHandler(mockReq, mockRes);
-
- expect(mockRes.status).toHaveBeenCalledWith(201);
- const agent = mockRes.json.mock.calls[0][0];
- expect(agent.tools).toEqual(['web_search']);
- });
-
- test('should store mcpServerNames only for authorized servers', async () => {
- mockReq.body = {
- provider: 'openai',
- model: 'gpt-4',
- name: 'MCP Names Test',
- tools: [`toolA${d}authorizedServer`, `toolB${d}forbiddenServer`],
- };
-
- await createAgentHandler(mockReq, mockRes);
-
- expect(mockRes.status).toHaveBeenCalledWith(201);
- const agent = mockRes.json.mock.calls[0][0];
- const agentInDb = await Agent.findOne({ id: agent.id });
- expect(agentInDb.mcpServerNames).toContain('authorizedServer');
- expect(agentInDb.mcpServerNames).not.toContain('forbiddenServer');
- });
- });
-
- describe('updateAgentHandler - MCP tool authorization', () => {
- let existingAgentId;
- let existingAgentAuthorId;
-
- beforeEach(async () => {
- existingAgentAuthorId = new mongoose.Types.ObjectId();
- const agent = await Agent.create({
- id: `agent_${uuidv4()}`,
- name: 'Original Agent',
- provider: 'openai',
- model: 'gpt-4',
- author: existingAgentAuthorId,
- tools: ['web_search', `existingTool${d}authorizedServer`],
- mcpServerNames: ['authorizedServer'],
- versions: [
- {
- name: 'Original Agent',
- provider: 'openai',
- model: 'gpt-4',
- tools: ['web_search', `existingTool${d}authorizedServer`],
- createdAt: new Date(),
- updatedAt: new Date(),
- },
- ],
- });
- existingAgentId = agent.id;
- });
-
- test('should preserve existing MCP tools even if editor lacks access', async () => {
- mockGetAllServerConfigs.mockResolvedValue({});
-
- mockReq.user.id = existingAgentAuthorId.toString();
- mockReq.params.id = existingAgentId;
- mockReq.body = {
- tools: ['web_search', `existingTool${d}authorizedServer`],
- };
-
- await updateAgentHandler(mockReq, mockRes);
-
- expect(mockRes.json).toHaveBeenCalled();
- const updatedAgent = mockRes.json.mock.calls[0][0];
- expect(updatedAgent.tools).toContain(`existingTool${d}authorizedServer`);
- expect(updatedAgent.tools).toContain('web_search');
- });
-
- test('should reject newly added unauthorized MCP tools', async () => {
- mockReq.user.id = existingAgentAuthorId.toString();
- mockReq.params.id = existingAgentId;
- mockReq.body = {
- tools: ['web_search', `existingTool${d}authorizedServer`, `attack${d}forbiddenServer`],
- };
-
- await updateAgentHandler(mockReq, mockRes);
-
- expect(mockRes.json).toHaveBeenCalled();
- const updatedAgent = mockRes.json.mock.calls[0][0];
- expect(updatedAgent.tools).toContain('web_search');
- expect(updatedAgent.tools).toContain(`existingTool${d}authorizedServer`);
- expect(updatedAgent.tools).not.toContain(`attack${d}forbiddenServer`);
- });
-
- test('should allow adding authorized MCP tools', async () => {
- mockReq.user.id = existingAgentAuthorId.toString();
- mockReq.params.id = existingAgentId;
- mockReq.body = {
- tools: ['web_search', `existingTool${d}authorizedServer`, `newTool${d}anotherServer`],
- };
-
- await updateAgentHandler(mockReq, mockRes);
-
- expect(mockRes.json).toHaveBeenCalled();
- const updatedAgent = mockRes.json.mock.calls[0][0];
- expect(updatedAgent.tools).toContain(`newTool${d}anotherServer`);
- });
-
- test('should not query MCP registry when no new MCP tools added', async () => {
- mockReq.user.id = existingAgentAuthorId.toString();
- mockReq.params.id = existingAgentId;
- mockReq.body = {
- tools: ['web_search', `existingTool${d}authorizedServer`],
- };
-
- await updateAgentHandler(mockReq, mockRes);
-
- expect(mockGetAllServerConfigs).not.toHaveBeenCalled();
- });
-
- test('should preserve existing MCP tools when registry unavailable and user edits agent', async () => {
- getMCPServersRegistry.mockImplementation(() => {
- throw new Error('MCPServersRegistry has not been initialized.');
- });
-
- mockReq.user.id = existingAgentAuthorId.toString();
- mockReq.params.id = existingAgentId;
- mockReq.body = {
- name: 'Renamed After Restart',
- tools: ['web_search', `existingTool${d}authorizedServer`],
- };
-
- await updateAgentHandler(mockReq, mockRes);
-
- expect(mockRes.json).toHaveBeenCalled();
- const updatedAgent = mockRes.json.mock.calls[0][0];
- expect(updatedAgent.tools).toContain(`existingTool${d}authorizedServer`);
- expect(updatedAgent.tools).toContain('web_search');
- expect(updatedAgent.name).toBe('Renamed After Restart');
- });
-
- test('should preserve existing MCP tools when server not in configs (disconnected)', async () => {
- mockGetAllServerConfigs.mockResolvedValue({});
-
- mockReq.user.id = existingAgentAuthorId.toString();
- mockReq.params.id = existingAgentId;
- mockReq.body = {
- name: 'Edited While Disconnected',
- tools: ['web_search', `existingTool${d}authorizedServer`],
- };
-
- await updateAgentHandler(mockReq, mockRes);
-
- expect(mockRes.json).toHaveBeenCalled();
- const updatedAgent = mockRes.json.mock.calls[0][0];
- expect(updatedAgent.tools).toContain(`existingTool${d}authorizedServer`);
- expect(updatedAgent.name).toBe('Edited While Disconnected');
- });
- });
-
- describe('duplicateAgentHandler - MCP tool authorization', () => {
- let sourceAgentId;
- let sourceAgentAuthorId;
-
- beforeEach(async () => {
- sourceAgentAuthorId = new mongoose.Types.ObjectId();
- const agent = await Agent.create({
- id: `agent_${uuidv4()}`,
- name: 'Source Agent',
- provider: 'openai',
- model: 'gpt-4',
- author: sourceAgentAuthorId,
- tools: ['web_search', `tool${d}authorizedServer`, `tool${d}forbiddenServer`],
- mcpServerNames: ['authorizedServer', 'forbiddenServer'],
- versions: [
- {
- name: 'Source Agent',
- provider: 'openai',
- model: 'gpt-4',
- tools: ['web_search', `tool${d}authorizedServer`, `tool${d}forbiddenServer`],
- createdAt: new Date(),
- updatedAt: new Date(),
- },
- ],
- });
- sourceAgentId = agent.id;
- });
-
- test('should strip unauthorized MCP tools from duplicated agent', async () => {
- mockGetAllServerConfigs.mockResolvedValue({
- authorizedServer: { type: 'sse' },
- });
-
- mockReq.user.id = sourceAgentAuthorId.toString();
- mockReq.params.id = sourceAgentId;
-
- await duplicateAgentHandler(mockReq, mockRes);
-
- expect(mockRes.status).toHaveBeenCalledWith(201);
- const { agent: newAgent } = mockRes.json.mock.calls[0][0];
- expect(newAgent.id).not.toBe(sourceAgentId);
- expect(newAgent.tools).toContain('web_search');
- expect(newAgent.tools).toContain(`tool${d}authorizedServer`);
- expect(newAgent.tools).not.toContain(`tool${d}forbiddenServer`);
-
- const agentInDb = await Agent.findOne({ id: newAgent.id });
- expect(agentInDb.mcpServerNames).toContain('authorizedServer');
- expect(agentInDb.mcpServerNames).not.toContain('forbiddenServer');
- });
-
- test('should preserve source agent MCP tools when registry is unavailable', async () => {
- getMCPServersRegistry.mockImplementation(() => {
- throw new Error('MCPServersRegistry has not been initialized.');
- });
-
- mockReq.user.id = sourceAgentAuthorId.toString();
- mockReq.params.id = sourceAgentId;
-
- await duplicateAgentHandler(mockReq, mockRes);
-
- expect(mockRes.status).toHaveBeenCalledWith(201);
- const { agent: newAgent } = mockRes.json.mock.calls[0][0];
- expect(newAgent.tools).toContain('web_search');
- expect(newAgent.tools).toContain(`tool${d}authorizedServer`);
- expect(newAgent.tools).toContain(`tool${d}forbiddenServer`);
- });
- });
-
- describe('revertAgentVersionHandler - MCP tool authorization', () => {
- let existingAgentId;
- let existingAgentAuthorId;
-
- beforeEach(async () => {
- existingAgentAuthorId = new mongoose.Types.ObjectId();
- const agent = await Agent.create({
- id: `agent_${uuidv4()}`,
- name: 'Reverted Agent V2',
- provider: 'openai',
- model: 'gpt-4',
- author: existingAgentAuthorId,
- tools: ['web_search'],
- versions: [
- {
- name: 'Reverted Agent V1',
- provider: 'openai',
- model: 'gpt-4',
- tools: ['web_search', `oldTool${d}revokedServer`],
- createdAt: new Date(Date.now() - 10000),
- updatedAt: new Date(Date.now() - 10000),
- },
- {
- name: 'Reverted Agent V2',
- provider: 'openai',
- model: 'gpt-4',
- tools: ['web_search'],
- createdAt: new Date(),
- updatedAt: new Date(),
- },
- ],
- });
- existingAgentId = agent.id;
- });
-
- test('should strip unauthorized MCP tools after reverting to a previous version', async () => {
- mockGetAllServerConfigs.mockResolvedValue({
- authorizedServer: { type: 'sse' },
- });
-
- mockReq.user.id = existingAgentAuthorId.toString();
- mockReq.params.id = existingAgentId;
- mockReq.body = { version_index: 0 };
-
- await revertAgentVersionHandler(mockReq, mockRes);
-
- expect(mockRes.json).toHaveBeenCalled();
- const result = mockRes.json.mock.calls[0][0];
- expect(result.tools).toContain('web_search');
- expect(result.tools).not.toContain(`oldTool${d}revokedServer`);
-
- const agentInDb = await Agent.findOne({ id: existingAgentId });
- expect(agentInDb.tools).toContain('web_search');
- expect(agentInDb.tools).not.toContain(`oldTool${d}revokedServer`);
- });
-
- test('should keep authorized MCP tools after revert', async () => {
- await Agent.updateOne(
- { id: existingAgentId },
- { $set: { 'versions.0.tools': ['web_search', `tool${d}authorizedServer`] } },
- );
-
- mockReq.user.id = existingAgentAuthorId.toString();
- mockReq.params.id = existingAgentId;
- mockReq.body = { version_index: 0 };
-
- await revertAgentVersionHandler(mockReq, mockRes);
-
- expect(mockRes.json).toHaveBeenCalled();
- const result = mockRes.json.mock.calls[0][0];
- expect(result.tools).toContain('web_search');
- expect(result.tools).toContain(`tool${d}authorizedServer`);
- });
-
- test('should preserve version MCP tools when registry is unavailable on revert', async () => {
- await Agent.updateOne(
- { id: existingAgentId },
- {
- $set: {
- 'versions.0.tools': [
- 'web_search',
- `validTool${d}authorizedServer`,
- `otherTool${d}anotherServer`,
- ],
- },
- },
- );
-
- getMCPServersRegistry.mockImplementation(() => {
- throw new Error('MCPServersRegistry has not been initialized.');
- });
-
- mockReq.user.id = existingAgentAuthorId.toString();
- mockReq.params.id = existingAgentId;
- mockReq.body = { version_index: 0 };
-
- await revertAgentVersionHandler(mockReq, mockRes);
-
- expect(mockRes.json).toHaveBeenCalled();
- const result = mockRes.json.mock.calls[0][0];
- expect(result.tools).toContain('web_search');
- expect(result.tools).toContain(`validTool${d}authorizedServer`);
- expect(result.tools).toContain(`otherTool${d}anotherServer`);
-
- const agentInDb = await Agent.findOne({ id: existingAgentId });
- expect(agentInDb.tools).toContain(`validTool${d}authorizedServer`);
- expect(agentInDb.tools).toContain(`otherTool${d}anotherServer`);
- });
- });
-});
diff --git a/api/server/controllers/agents/openai.js b/api/server/controllers/agents/openai.js
index 9fa3af82c3..b334580eb1 100644
--- a/api/server/controllers/agents/openai.js
+++ b/api/server/controllers/agents/openai.js
@@ -15,21 +15,18 @@ const {
createErrorResponse,
recordCollectedUsage,
getTransactionsConfig,
- resolveRecursionLimit,
createToolExecuteHandler,
buildNonStreamingResponse,
createOpenAIStreamTracker,
createOpenAIContentAggregator,
isChatCompletionValidationFailure,
} = require('@librechat/api');
-const {
- buildSummarizationHandlers,
- markSummarizationUsage,
- createToolEndCallback,
- agentLogHandlerObj,
-} = require('~/server/controllers/agents/callbacks');
const { loadAgentTools, loadToolsForExecution } = require('~/server/services/ToolService');
+const { createToolEndCallback } = require('~/server/controllers/agents/callbacks');
const { findAccessibleResources } = require('~/server/services/PermissionService');
+const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
+const { getConvoFiles } = require('~/models/Conversation');
+const { getAgent, getAgents } = require('~/models/Agent');
const db = require('~/models');
/**
@@ -132,6 +129,7 @@ const OpenAIChatCompletionController = async (req, res) => {
const appConfig = req.config;
const requestStartTime = Date.now();
+ // Validate request
const validation = validateRequest(req.body);
if (isChatCompletionValidationFailure(validation)) {
return sendErrorResponse(res, 400, validation.error);
@@ -141,7 +139,7 @@ const OpenAIChatCompletionController = async (req, res) => {
const agentId = request.model;
// Look up the agent
- const agent = await db.getAgent({ id: agentId });
+ const agent = await getAgent({ id: agentId });
if (!agent) {
return sendErrorResponse(
res,
@@ -152,18 +150,20 @@ const OpenAIChatCompletionController = async (req, res) => {
);
}
- const responseId = `chatcmpl-${nanoid()}`;
+ // Generate IDs
+ const requestId = `chatcmpl-${nanoid()}`;
+ const conversationId = request.conversation_id ?? nanoid();
+ const parentMessageId = request.parent_message_id ?? null;
const created = Math.floor(Date.now() / 1000);
- /** @type {import('@librechat/api').OpenAIResponseContext} — key must be `requestId` to match the type used by createChunk/buildNonStreamingResponse */
const context = {
created,
- requestId: responseId,
+ requestId,
model: agentId,
};
logger.debug(
- `[OpenAI API] Response ${responseId} started for agent ${agentId}, stream: ${request.stream}`,
+ `[OpenAI API] Request ${requestId} started for agent ${agentId}, stream: ${request.stream}`,
);
// Set up abort controller
@@ -178,25 +178,10 @@ const OpenAIChatCompletionController = async (req, res) => {
});
try {
- if (request.conversation_id != null) {
- if (typeof request.conversation_id !== 'string') {
- return sendErrorResponse(
- res,
- 400,
- 'conversation_id must be a string',
- 'invalid_request_error',
- );
- }
- if (!(await db.getConvo(req.user?.id, request.conversation_id))) {
- return sendErrorResponse(res, 404, 'Conversation not found', 'invalid_request_error');
- }
- }
-
- const conversationId = request.conversation_id ?? nanoid();
- const parentMessageId = request.parent_message_id ?? null;
-
- const agentsEConfig = appConfig?.endpoints?.[EModelEndpoint.agents];
- const allowedProviders = new Set(agentsEConfig?.allowedProviders);
+ // Build allowed providers set
+ const allowedProviders = new Set(
+ appConfig?.endpoints?.[EModelEndpoint.agents]?.allowedProviders,
+ );
// Create tool loader
const loadTools = createToolLoader(abortController.signal);
@@ -221,7 +206,7 @@ const OpenAIChatCompletionController = async (req, res) => {
isInitialAgent: true,
},
{
- getConvoFiles: db.getConvoFiles,
+ getConvoFiles,
getFiles: db.getFiles,
getUserKey: db.getUserKey,
getMessages: db.getMessages,
@@ -280,22 +265,19 @@ const OpenAIChatCompletionController = async (req, res) => {
toolRegistry: primaryConfig.toolRegistry,
userMCPAuthMap: primaryConfig.userMCPAuthMap,
tool_resources: primaryConfig.tool_resources,
- actionsEnabled: primaryConfig.actionsEnabled,
});
},
toolEndCallback,
};
- const summarizationConfig = appConfig?.summarization;
-
const openaiMessages = convertMessages(request.messages);
const toolSet = buildToolSet(primaryConfig);
- const {
- messages: formattedMessages,
- indexTokenCountMap,
- summary: initialSummary,
- } = formatAgentMessages(openaiMessages, {}, toolSet);
+ const { messages: formattedMessages, indexTokenCountMap } = formatAgentMessages(
+ openaiMessages,
+ {},
+ toolSet,
+ );
/**
* Create a simple handler that processes data
@@ -438,30 +420,24 @@ const OpenAIChatCompletionController = async (req, res) => {
}),
// Usage tracking
- on_chat_model_end: {
- handle: (_event, data, metadata) => {
- const usage = data?.output?.usage_metadata;
- if (usage) {
- const taggedUsage = markSummarizationUsage(usage, metadata);
- collectedUsage.push(taggedUsage);
- const target = isStreaming ? tracker : aggregator;
- target.usage.promptTokens += taggedUsage.input_tokens ?? 0;
- target.usage.completionTokens += taggedUsage.output_tokens ?? 0;
- }
- },
- },
+ on_chat_model_end: createHandler((data) => {
+ const usage = data?.output?.usage_metadata;
+ if (usage) {
+ collectedUsage.push(usage);
+ const target = isStreaming ? tracker : aggregator;
+ target.usage.promptTokens += usage.input_tokens ?? 0;
+ target.usage.completionTokens += usage.output_tokens ?? 0;
+ }
+ }),
on_run_step_completed: createHandler(),
// Use proper ToolEndHandler for processing artifacts (images, file citations, code output)
on_tool_end: new ToolEndHandler(toolEndCallback, logger),
on_chain_stream: createHandler(),
on_chain_end: createHandler(),
on_agent_update: createHandler(),
- on_agent_log: agentLogHandlerObj,
on_custom_event: createHandler(),
+ // Event-driven tool execution handler
on_tool_execute: createToolExecuteHandler(toolExecuteOptions),
- ...(summarizationConfig?.enabled !== false
- ? buildSummarizationHandlers({ isStreaming, res })
- : {}),
};
// Create and run the agent
@@ -474,13 +450,11 @@ const OpenAIChatCompletionController = async (req, res) => {
agents: [primaryConfig],
messages: formattedMessages,
indexTokenCountMap,
- initialSummary,
- runId: responseId,
- summarizationConfig,
+ runId: requestId,
signal: abortController.signal,
customHandlers: handlers,
requestBody: {
- messageId: responseId,
+ messageId: requestId,
conversationId,
},
user: { id: userId },
@@ -490,19 +464,15 @@ const OpenAIChatCompletionController = async (req, res) => {
throw new Error('Failed to create agent run');
}
+ // Process the stream
const config = {
runName: 'AgentRun',
configurable: {
thread_id: conversationId,
user_id: userId,
user: createSafeUser(req.user),
- requestBody: {
- messageId: responseId,
- conversationId,
- },
...(userMCPAuthMap != null && { userMCPAuthMap }),
},
- recursionLimit: resolveRecursionLimit(agentsEConfig, agent),
signal: abortController.signal,
streamMode: 'values',
version: 'v2',
@@ -520,18 +490,12 @@ const OpenAIChatCompletionController = async (req, res) => {
const balanceConfig = getBalanceConfig(appConfig);
const transactionsConfig = getTransactionsConfig(appConfig);
recordCollectedUsage(
- {
- spendTokens: db.spendTokens,
- spendStructuredTokens: db.spendStructuredTokens,
- pricing: { getMultiplier: db.getMultiplier, getCacheMultiplier: db.getCacheMultiplier },
- bulkWriteOps: { insertMany: db.bulkInsertTransactions, updateBalance: db.updateBalance },
- },
+ { spendTokens, spendStructuredTokens },
{
user: userId,
conversationId,
collectedUsage,
context: 'message',
- messageId: responseId,
balance: balanceConfig,
transactions: transactionsConfig,
model: primaryConfig.model || agent.model_parameters?.model,
@@ -545,7 +509,7 @@ const OpenAIChatCompletionController = async (req, res) => {
if (isStreaming) {
sendFinalChunk(handlerConfig);
res.end();
- logger.debug(`[OpenAI API] Response ${responseId} completed in ${duration}ms (streaming)`);
+ logger.debug(`[OpenAI API] Request ${requestId} completed in ${duration}ms (streaming)`);
// Wait for artifact processing after response ends (non-blocking)
if (artifactPromises.length > 0) {
@@ -584,9 +548,7 @@ const OpenAIChatCompletionController = async (req, res) => {
usage,
);
res.json(response);
- logger.debug(
- `[OpenAI API] Response ${responseId} completed in ${duration}ms (non-streaming)`,
- );
+ logger.debug(`[OpenAI API] Request ${requestId} completed in ${duration}ms (non-streaming)`);
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'An error occurred';
@@ -637,7 +599,7 @@ const ListModelsController = async (req, res) => {
// Get the accessible agents
let agents = [];
if (accessibleAgentIds.length > 0) {
- agents = await db.getAgents({ _id: { $in: accessibleAgentIds } });
+ agents = await getAgents({ _id: { $in: accessibleAgentIds } });
}
const models = agents.map((agent) => ({
@@ -680,7 +642,7 @@ const GetModelController = async (req, res) => {
return sendErrorResponse(res, 401, 'Authentication required', 'auth_error');
}
- const agent = await db.getAgent({ id: model });
+ const agent = await getAgent({ id: model });
if (!agent) {
return sendErrorResponse(
diff --git a/api/server/controllers/agents/recordCollectedUsage.spec.js b/api/server/controllers/agents/recordCollectedUsage.spec.js
index 009c5b262c..6904f2ed39 100644
--- a/api/server/controllers/agents/recordCollectedUsage.spec.js
+++ b/api/server/controllers/agents/recordCollectedUsage.spec.js
@@ -2,29 +2,21 @@
* Tests for AgentClient.recordCollectedUsage
*
* This is a critical function that handles token spending for agent LLM calls.
- * The client now delegates to the TS recordCollectedUsage from @librechat/api,
- * passing pricing and bulkWriteOps deps.
+ * It must correctly handle:
+ * - Sequential execution (single agent with tool calls)
+ * - Parallel execution (multiple agents with independent inputs)
+ * - Cache token handling (OpenAI and Anthropic formats)
*/
const { EModelEndpoint } = require('librechat-data-provider');
+// Mock dependencies before requiring the module
const mockSpendTokens = jest.fn().mockResolvedValue();
const mockSpendStructuredTokens = jest.fn().mockResolvedValue();
-const mockGetMultiplier = jest.fn().mockReturnValue(1);
-const mockGetCacheMultiplier = jest.fn().mockReturnValue(null);
-const mockUpdateBalance = jest.fn().mockResolvedValue({});
-const mockBulkInsertTransactions = jest.fn().mockResolvedValue(undefined);
-const mockRecordCollectedUsage = jest
- .fn()
- .mockResolvedValue({ input_tokens: 100, output_tokens: 50 });
-jest.mock('~/models', () => ({
+jest.mock('~/models/spendTokens', () => ({
spendTokens: (...args) => mockSpendTokens(...args),
spendStructuredTokens: (...args) => mockSpendStructuredTokens(...args),
- getMultiplier: mockGetMultiplier,
- getCacheMultiplier: mockGetCacheMultiplier,
- updateBalance: mockUpdateBalance,
- bulkInsertTransactions: mockBulkInsertTransactions,
}));
jest.mock('~/config', () => ({
@@ -47,14 +39,6 @@ jest.mock('@librechat/agents', () => ({
}),
}));
-jest.mock('@librechat/api', () => {
- const actual = jest.requireActual('@librechat/api');
- return {
- ...actual,
- recordCollectedUsage: (...args) => mockRecordCollectedUsage(...args),
- };
-});
-
const AgentClient = require('./client');
describe('AgentClient - recordCollectedUsage', () => {
@@ -90,7 +74,30 @@ describe('AgentClient - recordCollectedUsage', () => {
});
describe('basic functionality', () => {
- it('should delegate to recordCollectedUsage with full deps', async () => {
+ it('should return early if collectedUsage is empty', async () => {
+ await client.recordCollectedUsage({
+ collectedUsage: [],
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ expect(mockSpendTokens).not.toHaveBeenCalled();
+ expect(mockSpendStructuredTokens).not.toHaveBeenCalled();
+ expect(client.usage).toBeUndefined();
+ });
+
+ it('should return early if collectedUsage is null', async () => {
+ await client.recordCollectedUsage({
+ collectedUsage: null,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ expect(mockSpendTokens).not.toHaveBeenCalled();
+ expect(client.usage).toBeUndefined();
+ });
+
+ it('should handle single usage entry correctly', async () => {
const collectedUsage = [{ input_tokens: 100, output_tokens: 50, model: 'gpt-4' }];
await client.recordCollectedUsage({
@@ -99,57 +106,25 @@ describe('AgentClient - recordCollectedUsage', () => {
transactions: { enabled: true },
});
- expect(mockRecordCollectedUsage).toHaveBeenCalledTimes(1);
- const [deps, params] = mockRecordCollectedUsage.mock.calls[0];
-
- expect(deps).toHaveProperty('spendTokens');
- expect(deps).toHaveProperty('spendStructuredTokens');
- expect(deps).toHaveProperty('pricing');
- expect(deps.pricing).toHaveProperty('getMultiplier');
- expect(deps.pricing).toHaveProperty('getCacheMultiplier');
- expect(deps).toHaveProperty('bulkWriteOps');
- expect(deps.bulkWriteOps).toHaveProperty('insertMany');
- expect(deps.bulkWriteOps).toHaveProperty('updateBalance');
-
- expect(params).toEqual(
+ expect(mockSpendTokens).toHaveBeenCalledTimes(1);
+ expect(mockSpendTokens).toHaveBeenCalledWith(
expect.objectContaining({
- user: 'user-123',
conversationId: 'convo-123',
- collectedUsage,
- context: 'message',
- balance: { enabled: true },
- transactions: { enabled: true },
+ user: 'user-123',
+ model: 'gpt-4',
}),
+ { promptTokens: 100, completionTokens: 50 },
);
+ expect(client.usage.input_tokens).toBe(100);
+ expect(client.usage.output_tokens).toBe(50);
});
- it('should not set this.usage if collectedUsage is empty (returns undefined)', async () => {
- mockRecordCollectedUsage.mockResolvedValue(undefined);
-
- await client.recordCollectedUsage({
- collectedUsage: [],
- balance: { enabled: true },
- transactions: { enabled: true },
- });
-
- expect(client.usage).toBeUndefined();
- });
-
- it('should not set this.usage if collectedUsage is null (returns undefined)', async () => {
- mockRecordCollectedUsage.mockResolvedValue(undefined);
-
- await client.recordCollectedUsage({
- collectedUsage: null,
- balance: { enabled: true },
- transactions: { enabled: true },
- });
-
- expect(client.usage).toBeUndefined();
- });
-
- it('should set this.usage from recordCollectedUsage result', async () => {
- mockRecordCollectedUsage.mockResolvedValue({ input_tokens: 200, output_tokens: 75 });
- const collectedUsage = [{ input_tokens: 200, output_tokens: 75, model: 'gpt-4' }];
+ it('should skip null entries in collectedUsage', async () => {
+ const collectedUsage = [
+ { input_tokens: 100, output_tokens: 50, model: 'gpt-4' },
+ null,
+ { input_tokens: 200, output_tokens: 60, model: 'gpt-4' },
+ ];
await client.recordCollectedUsage({
collectedUsage,
@@ -157,62 +132,82 @@ describe('AgentClient - recordCollectedUsage', () => {
transactions: { enabled: true },
});
- expect(client.usage).toEqual({ input_tokens: 200, output_tokens: 75 });
+ expect(mockSpendTokens).toHaveBeenCalledTimes(2);
});
});
describe('sequential execution (single agent with tool calls)', () => {
- it('should pass all usage entries to recordCollectedUsage', async () => {
+ it('should calculate tokens correctly for sequential tool calls', async () => {
+ // Sequential flow: output of call N becomes part of input for call N+1
+ // Call 1: input=100, output=50
+ // Call 2: input=150 (100+50), output=30
+ // Call 3: input=180 (150+30), output=20
const collectedUsage = [
{ input_tokens: 100, output_tokens: 50, model: 'gpt-4' },
{ input_tokens: 150, output_tokens: 30, model: 'gpt-4' },
{ input_tokens: 180, output_tokens: 20, model: 'gpt-4' },
];
- mockRecordCollectedUsage.mockResolvedValue({ input_tokens: 100, output_tokens: 100 });
-
await client.recordCollectedUsage({
collectedUsage,
balance: { enabled: true },
transactions: { enabled: true },
});
- expect(mockRecordCollectedUsage).toHaveBeenCalledTimes(1);
- const [, params] = mockRecordCollectedUsage.mock.calls[0];
- expect(params.collectedUsage).toHaveLength(3);
+ expect(mockSpendTokens).toHaveBeenCalledTimes(3);
+ // Total output should be sum of all output_tokens: 50 + 30 + 20 = 100
expect(client.usage.output_tokens).toBe(100);
- expect(client.usage.input_tokens).toBe(100);
+ expect(client.usage.input_tokens).toBe(100); // First entry's input
});
});
describe('parallel execution (multiple agents)', () => {
- it('should pass parallel agent usage to recordCollectedUsage', async () => {
+ it('should handle parallel agents with independent input tokens', async () => {
+ // Parallel agents have INDEPENDENT input tokens (not cumulative)
+ // Agent A: input=100, output=50
+ // Agent B: input=80, output=40 (different context, not 100+50)
const collectedUsage = [
{ input_tokens: 100, output_tokens: 50, model: 'gpt-4' },
{ input_tokens: 80, output_tokens: 40, model: 'gpt-4' },
];
- mockRecordCollectedUsage.mockResolvedValue({ input_tokens: 100, output_tokens: 90 });
-
await client.recordCollectedUsage({
collectedUsage,
balance: { enabled: true },
transactions: { enabled: true },
});
- expect(mockRecordCollectedUsage).toHaveBeenCalledTimes(1);
- expect(client.usage.output_tokens).toBe(90);
+ expect(mockSpendTokens).toHaveBeenCalledTimes(2);
+ // Expected total output: 50 + 40 = 90
+ // output_tokens must be positive and should reflect total output
expect(client.usage.output_tokens).toBeGreaterThan(0);
});
- /** Bug regression: parallel agents where second agent has LOWER input tokens produced negative output via incremental calculation. */
- it('should NOT produce negative output_tokens', async () => {
+ it('should NOT produce negative output_tokens for parallel execution', async () => {
+ // Critical bug scenario: parallel agents where second agent has LOWER input tokens
const collectedUsage = [
{ input_tokens: 200, output_tokens: 100, model: 'gpt-4' },
{ input_tokens: 50, output_tokens: 30, model: 'gpt-4' },
];
- mockRecordCollectedUsage.mockResolvedValue({ input_tokens: 200, output_tokens: 130 });
+ await client.recordCollectedUsage({
+ collectedUsage,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ // output_tokens MUST be positive for proper token tracking
+ expect(client.usage.output_tokens).toBeGreaterThan(0);
+ // Correct value should be 100 + 30 = 130
+ });
+
+ it('should calculate correct total output for parallel agents', async () => {
+ // Three parallel agents with independent contexts
+ const collectedUsage = [
+ { input_tokens: 100, output_tokens: 50, model: 'gpt-4' },
+ { input_tokens: 120, output_tokens: 60, model: 'gpt-4-turbo' },
+ { input_tokens: 80, output_tokens: 40, model: 'claude-3' },
+ ];
await client.recordCollectedUsage({
collectedUsage,
@@ -220,44 +215,111 @@ describe('AgentClient - recordCollectedUsage', () => {
transactions: { enabled: true },
});
+ expect(mockSpendTokens).toHaveBeenCalledTimes(3);
+ // Total output should be 50 + 60 + 40 = 150
+ expect(client.usage.output_tokens).toBe(150);
+ });
+
+ it('should handle worst-case parallel scenario without negative tokens', async () => {
+ // Extreme case: first agent has very high input, subsequent have low
+ const collectedUsage = [
+ { input_tokens: 1000, output_tokens: 500, model: 'gpt-4' },
+ { input_tokens: 100, output_tokens: 50, model: 'gpt-4' },
+ { input_tokens: 50, output_tokens: 25, model: 'gpt-4' },
+ ];
+
+ await client.recordCollectedUsage({
+ collectedUsage,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ // Must be positive, should be 500 + 50 + 25 = 575
expect(client.usage.output_tokens).toBeGreaterThan(0);
- expect(client.usage.output_tokens).toBe(130);
+ expect(client.usage.output_tokens).toBe(575);
});
});
describe('real-world scenarios', () => {
- it('should correctly handle sequential tool calls with growing context', async () => {
- const collectedUsage = [
- { input_tokens: 31596, output_tokens: 151, model: 'claude-opus-4-5-20251101' },
- { input_tokens: 35368, output_tokens: 150, model: 'claude-opus-4-5-20251101' },
- { input_tokens: 58362, output_tokens: 295, model: 'claude-opus-4-5-20251101' },
- { input_tokens: 112604, output_tokens: 193, model: 'claude-opus-4-5-20251101' },
- { input_tokens: 257440, output_tokens: 2217, model: 'claude-opus-4-5-20251101' },
- ];
-
- mockRecordCollectedUsage.mockResolvedValue({ input_tokens: 31596, output_tokens: 3006 });
-
- await client.recordCollectedUsage({
- collectedUsage,
- balance: { enabled: true },
- transactions: { enabled: true },
- });
-
- expect(client.usage.input_tokens).toBe(31596);
- expect(client.usage.output_tokens).toBe(3006);
- });
-
- it('should correctly handle cache tokens', async () => {
+ it('should correctly sum output tokens for sequential tool calls with growing context', async () => {
+ // Real production data: Claude Opus with multiple tool calls
+ // Context grows as tool results are added, but output_tokens should only count model generations
const collectedUsage = [
{
- input_tokens: 788,
- output_tokens: 163,
- input_token_details: { cache_read: 0, cache_creation: 30808 },
+ input_tokens: 31596,
+ output_tokens: 151,
+ total_tokens: 31747,
+ input_token_details: { cache_read: 0, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 35368,
+ output_tokens: 150,
+ total_tokens: 35518,
+ input_token_details: { cache_read: 0, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 58362,
+ output_tokens: 295,
+ total_tokens: 58657,
+ input_token_details: { cache_read: 0, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 112604,
+ output_tokens: 193,
+ total_tokens: 112797,
+ input_token_details: { cache_read: 0, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 257440,
+ output_tokens: 2217,
+ total_tokens: 259657,
+ input_token_details: { cache_read: 0, cache_creation: 0 },
model: 'claude-opus-4-5-20251101',
},
];
- mockRecordCollectedUsage.mockResolvedValue({ input_tokens: 31596, output_tokens: 163 });
+ await client.recordCollectedUsage({
+ collectedUsage,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ // input_tokens should be first entry's input (initial context)
+ expect(client.usage.input_tokens).toBe(31596);
+
+ // output_tokens should be sum of all model outputs: 151 + 150 + 295 + 193 + 2217 = 3006
+ // NOT the inflated value from incremental calculation (338,559)
+ expect(client.usage.output_tokens).toBe(3006);
+
+ // Verify spendTokens was called for each entry with correct values
+ expect(mockSpendTokens).toHaveBeenCalledTimes(5);
+ expect(mockSpendTokens).toHaveBeenNthCalledWith(
+ 1,
+ expect.objectContaining({ model: 'claude-opus-4-5-20251101' }),
+ { promptTokens: 31596, completionTokens: 151 },
+ );
+ expect(mockSpendTokens).toHaveBeenNthCalledWith(
+ 5,
+ expect.objectContaining({ model: 'claude-opus-4-5-20251101' }),
+ { promptTokens: 257440, completionTokens: 2217 },
+ );
+ });
+
+ it('should handle single followup message correctly', async () => {
+ // Real production data: followup to the above conversation
+ const collectedUsage = [
+ {
+ input_tokens: 263406,
+ output_tokens: 257,
+ total_tokens: 263663,
+ input_token_details: { cache_read: 0, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ ];
await client.recordCollectedUsage({
collectedUsage,
@@ -265,14 +327,300 @@ describe('AgentClient - recordCollectedUsage', () => {
transactions: { enabled: true },
});
+ expect(client.usage.input_tokens).toBe(263406);
+ expect(client.usage.output_tokens).toBe(257);
+
+ expect(mockSpendTokens).toHaveBeenCalledTimes(1);
+ expect(mockSpendTokens).toHaveBeenCalledWith(
+ expect.objectContaining({ model: 'claude-opus-4-5-20251101' }),
+ { promptTokens: 263406, completionTokens: 257 },
+ );
+ });
+
+ it('should ensure output_tokens > 0 check passes for BaseClient.sendMessage', async () => {
+ // This verifies the fix for the duplicate token spending bug
+ // BaseClient.sendMessage checks: if (usage != null && Number(usage[this.outputTokensKey]) > 0)
+ const collectedUsage = [
+ {
+ input_tokens: 31596,
+ output_tokens: 151,
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 35368,
+ output_tokens: 150,
+ model: 'claude-opus-4-5-20251101',
+ },
+ ];
+
+ await client.recordCollectedUsage({
+ collectedUsage,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ const usage = client.getStreamUsage();
+
+ // The check that was failing before the fix
+ expect(usage).not.toBeNull();
+ expect(Number(usage.output_tokens)).toBeGreaterThan(0);
+
+ // Verify correct value
+ expect(usage.output_tokens).toBe(301); // 151 + 150
+ });
+
+ it('should correctly handle cache tokens with multiple tool calls', async () => {
+ // Real production data: Claude Opus with cache tokens (prompt caching)
+ // First entry has cache_creation, subsequent entries have cache_read
+ const collectedUsage = [
+ {
+ input_tokens: 788,
+ output_tokens: 163,
+ total_tokens: 951,
+ input_token_details: { cache_read: 0, cache_creation: 30808 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 3802,
+ output_tokens: 149,
+ total_tokens: 3951,
+ input_token_details: { cache_read: 30808, cache_creation: 768 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 26808,
+ output_tokens: 225,
+ total_tokens: 27033,
+ input_token_details: { cache_read: 31576, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 80912,
+ output_tokens: 204,
+ total_tokens: 81116,
+ input_token_details: { cache_read: 31576, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 136454,
+ output_tokens: 206,
+ total_tokens: 136660,
+ input_token_details: { cache_read: 31576, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 146316,
+ output_tokens: 224,
+ total_tokens: 146540,
+ input_token_details: { cache_read: 31576, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 150402,
+ output_tokens: 1248,
+ total_tokens: 151650,
+ input_token_details: { cache_read: 31576, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 156268,
+ output_tokens: 139,
+ total_tokens: 156407,
+ input_token_details: { cache_read: 31576, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ {
+ input_tokens: 167126,
+ output_tokens: 2961,
+ total_tokens: 170087,
+ input_token_details: { cache_read: 31576, cache_creation: 0 },
+ model: 'claude-opus-4-5-20251101',
+ },
+ ];
+
+ await client.recordCollectedUsage({
+ collectedUsage,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ // input_tokens = first entry's input + cache_creation + cache_read
+ // = 788 + 30808 + 0 = 31596
expect(client.usage.input_tokens).toBe(31596);
- expect(client.usage.output_tokens).toBe(163);
+
+ // output_tokens = sum of all output_tokens
+ // = 163 + 149 + 225 + 204 + 206 + 224 + 1248 + 139 + 2961 = 5519
+ expect(client.usage.output_tokens).toBe(5519);
+
+ // First 2 entries have cache tokens, should use spendStructuredTokens
+ // Remaining 7 entries have cache_read but no cache_creation, still structured
+ expect(mockSpendStructuredTokens).toHaveBeenCalledTimes(9);
+ expect(mockSpendTokens).toHaveBeenCalledTimes(0);
+
+ // Verify first entry uses structured tokens with cache_creation
+ expect(mockSpendStructuredTokens).toHaveBeenNthCalledWith(
+ 1,
+ expect.objectContaining({ model: 'claude-opus-4-5-20251101' }),
+ {
+ promptTokens: { input: 788, write: 30808, read: 0 },
+ completionTokens: 163,
+ },
+ );
+
+ // Verify second entry uses structured tokens with both cache_creation and cache_read
+ expect(mockSpendStructuredTokens).toHaveBeenNthCalledWith(
+ 2,
+ expect.objectContaining({ model: 'claude-opus-4-5-20251101' }),
+ {
+ promptTokens: { input: 3802, write: 768, read: 30808 },
+ completionTokens: 149,
+ },
+ );
+ });
+ });
+
+ describe('cache token handling', () => {
+ it('should handle OpenAI format cache tokens (input_token_details)', async () => {
+ const collectedUsage = [
+ {
+ input_tokens: 100,
+ output_tokens: 50,
+ model: 'gpt-4',
+ input_token_details: {
+ cache_creation: 20,
+ cache_read: 10,
+ },
+ },
+ ];
+
+ await client.recordCollectedUsage({
+ collectedUsage,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ expect(mockSpendStructuredTokens).toHaveBeenCalledTimes(1);
+ expect(mockSpendStructuredTokens).toHaveBeenCalledWith(
+ expect.objectContaining({ model: 'gpt-4' }),
+ {
+ promptTokens: {
+ input: 100,
+ write: 20,
+ read: 10,
+ },
+ completionTokens: 50,
+ },
+ );
+ });
+
+ it('should handle Anthropic format cache tokens (cache_*_input_tokens)', async () => {
+ const collectedUsage = [
+ {
+ input_tokens: 100,
+ output_tokens: 50,
+ model: 'claude-3',
+ cache_creation_input_tokens: 25,
+ cache_read_input_tokens: 15,
+ },
+ ];
+
+ await client.recordCollectedUsage({
+ collectedUsage,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ expect(mockSpendStructuredTokens).toHaveBeenCalledTimes(1);
+ expect(mockSpendStructuredTokens).toHaveBeenCalledWith(
+ expect.objectContaining({ model: 'claude-3' }),
+ {
+ promptTokens: {
+ input: 100,
+ write: 25,
+ read: 15,
+ },
+ completionTokens: 50,
+ },
+ );
+ });
+
+ it('should use spendTokens for entries without cache tokens', async () => {
+ const collectedUsage = [{ input_tokens: 100, output_tokens: 50, model: 'gpt-4' }];
+
+ await client.recordCollectedUsage({
+ collectedUsage,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ expect(mockSpendTokens).toHaveBeenCalledTimes(1);
+ expect(mockSpendStructuredTokens).not.toHaveBeenCalled();
+ });
+
+ it('should handle mixed cache and non-cache entries', async () => {
+ const collectedUsage = [
+ { input_tokens: 100, output_tokens: 50, model: 'gpt-4' },
+ {
+ input_tokens: 150,
+ output_tokens: 30,
+ model: 'gpt-4',
+ input_token_details: { cache_creation: 10, cache_read: 5 },
+ },
+ { input_tokens: 200, output_tokens: 20, model: 'gpt-4' },
+ ];
+
+ await client.recordCollectedUsage({
+ collectedUsage,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ expect(mockSpendTokens).toHaveBeenCalledTimes(2);
+ expect(mockSpendStructuredTokens).toHaveBeenCalledTimes(1);
+ });
+
+ it('should include cache tokens in total input calculation', async () => {
+ const collectedUsage = [
+ {
+ input_tokens: 100,
+ output_tokens: 50,
+ model: 'gpt-4',
+ input_token_details: {
+ cache_creation: 20,
+ cache_read: 10,
+ },
+ },
+ ];
+
+ await client.recordCollectedUsage({
+ collectedUsage,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ // Total input should include cache tokens: 100 + 20 + 10 = 130
+ expect(client.usage.input_tokens).toBe(130);
});
});
describe('model fallback', () => {
- it('should use param model when available', async () => {
- mockRecordCollectedUsage.mockResolvedValue({ input_tokens: 100, output_tokens: 50 });
+ it('should use usage.model when available', async () => {
+ const collectedUsage = [{ input_tokens: 100, output_tokens: 50, model: 'gpt-4-turbo' }];
+
+ await client.recordCollectedUsage({
+ model: 'fallback-model',
+ collectedUsage,
+ balance: { enabled: true },
+ transactions: { enabled: true },
+ });
+
+ expect(mockSpendTokens).toHaveBeenCalledWith(
+ expect.objectContaining({ model: 'gpt-4-turbo' }),
+ expect.any(Object),
+ );
+ });
+
+ it('should fallback to param model when usage.model is missing', async () => {
const collectedUsage = [{ input_tokens: 100, output_tokens: 50 }];
await client.recordCollectedUsage({
@@ -282,13 +630,14 @@ describe('AgentClient - recordCollectedUsage', () => {
transactions: { enabled: true },
});
- const [, params] = mockRecordCollectedUsage.mock.calls[0];
- expect(params.model).toBe('param-model');
+ expect(mockSpendTokens).toHaveBeenCalledWith(
+ expect.objectContaining({ model: 'param-model' }),
+ expect.any(Object),
+ );
});
it('should fallback to client.model when param model is missing', async () => {
client.model = 'client-model';
- mockRecordCollectedUsage.mockResolvedValue({ input_tokens: 100, output_tokens: 50 });
const collectedUsage = [{ input_tokens: 100, output_tokens: 50 }];
await client.recordCollectedUsage({
@@ -297,12 +646,13 @@ describe('AgentClient - recordCollectedUsage', () => {
transactions: { enabled: true },
});
- const [, params] = mockRecordCollectedUsage.mock.calls[0];
- expect(params.model).toBe('client-model');
+ expect(mockSpendTokens).toHaveBeenCalledWith(
+ expect.objectContaining({ model: 'client-model' }),
+ expect.any(Object),
+ );
});
it('should fallback to agent model_parameters.model as last resort', async () => {
- mockRecordCollectedUsage.mockResolvedValue({ input_tokens: 100, output_tokens: 50 });
const collectedUsage = [{ input_tokens: 100, output_tokens: 50 }];
await client.recordCollectedUsage({
@@ -311,14 +661,15 @@ describe('AgentClient - recordCollectedUsage', () => {
transactions: { enabled: true },
});
- const [, params] = mockRecordCollectedUsage.mock.calls[0];
- expect(params.model).toBe('gpt-4');
+ expect(mockSpendTokens).toHaveBeenCalledWith(
+ expect.objectContaining({ model: 'gpt-4' }),
+ expect.any(Object),
+ );
});
});
describe('getStreamUsage integration', () => {
it('should return the usage object set by recordCollectedUsage', async () => {
- mockRecordCollectedUsage.mockResolvedValue({ input_tokens: 100, output_tokens: 50 });
const collectedUsage = [{ input_tokens: 100, output_tokens: 50, model: 'gpt-4' }];
await client.recordCollectedUsage({
@@ -328,7 +679,10 @@ describe('AgentClient - recordCollectedUsage', () => {
});
const usage = client.getStreamUsage();
- expect(usage).toEqual({ input_tokens: 100, output_tokens: 50 });
+ expect(usage).toEqual({
+ input_tokens: 100,
+ output_tokens: 50,
+ });
});
it('should return undefined before recordCollectedUsage is called', () => {
@@ -336,9 +690,9 @@ describe('AgentClient - recordCollectedUsage', () => {
expect(usage).toBeUndefined();
});
- /** Verifies usage passes the check in BaseClient.sendMessage: if (usage != null && Number(usage[this.outputTokensKey]) > 0) */
it('should have output_tokens > 0 for BaseClient.sendMessage check', async () => {
- mockRecordCollectedUsage.mockResolvedValue({ input_tokens: 200, output_tokens: 130 });
+ // This test verifies the usage will pass the check in BaseClient.sendMessage:
+ // if (usage != null && Number(usage[this.outputTokensKey]) > 0)
const collectedUsage = [
{ input_tokens: 200, output_tokens: 100, model: 'gpt-4' },
{ input_tokens: 50, output_tokens: 30, model: 'gpt-4' },
diff --git a/api/server/controllers/agents/request.js b/api/server/controllers/agents/request.js
index 6f7e1b88c1..79387b6e89 100644
--- a/api/server/controllers/agents/request.js
+++ b/api/server/controllers/agents/request.js
@@ -3,9 +3,9 @@ const { Constants, ViolationTypes } = require('librechat-data-provider');
const {
sendEvent,
getViolationInfo,
- buildMessageFiles,
GenerationJobManager,
decrementPendingRequest,
+ sanitizeFileForTransmit,
sanitizeMessageForTransmit,
checkAndIncrementPendingRequest,
} = require('@librechat/api');
@@ -131,15 +131,9 @@ const ResumableAgentController = async (req, res, next, initializeClient, addTit
partialMessage.agent_id = req.body.agent_id;
}
- await saveMessage(
- {
- userId: req?.user?.id,
- isTemporary: req?.body?.isTemporary,
- interfaceConfig: req?.config?.interfaceConfig,
- },
- partialMessage,
- { context: 'api/server/controllers/agents/request.js - partial response on disconnect' },
- );
+ await saveMessage(req, partialMessage, {
+ context: 'api/server/controllers/agents/request.js - partial response on disconnect',
+ });
logger.debug(
`[ResumableAgentController] Saved partial response for ${streamId}, content parts: ${aggregatedContent.length}`,
@@ -258,10 +252,13 @@ const ResumableAgentController = async (req, res, next, initializeClient, addTit
conversation.title =
conversation && !conversation.title ? null : conversation?.title || 'New Chat';
- if (req.body.files && Array.isArray(client.options.attachments)) {
- const files = buildMessageFiles(req.body.files, client.options.attachments);
- if (files.length > 0) {
- userMessage.files = files;
+ if (req.body.files && client.options?.attachments) {
+ userMessage.files = [];
+ const messageFiles = new Set(req.body.files.map((file) => file.file_id));
+ for (const attachment of client.options.attachments) {
+ if (messageFiles.has(attachment.file_id)) {
+ userMessage.files.push(sanitizeFileForTransmit(attachment));
+ }
}
delete userMessage.image_urls;
}
@@ -277,14 +274,8 @@ const ResumableAgentController = async (req, res, next, initializeClient, addTit
// Save user message BEFORE sending final event to avoid race condition
// where client refetch happens before database is updated
- const reqCtx = {
- userId: req?.user?.id,
- isTemporary: req?.body?.isTemporary,
- interfaceConfig: req?.config?.interfaceConfig,
- };
-
if (!client.skipSaveUserMessage && userMessage) {
- await saveMessage(reqCtx, userMessage, {
+ await saveMessage(req, userMessage, {
context: 'api/server/controllers/agents/request.js - resumable user message',
});
}
@@ -294,7 +285,7 @@ const ResumableAgentController = async (req, res, next, initializeClient, addTit
// before the response is saved to the database, causing orphaned parentMessageIds.
if (client.savedMessageIds && !client.savedMessageIds.has(messageId)) {
await saveMessage(
- reqCtx,
+ req,
{ ...response, user: userId, unfinished: wasAbortedBeforeComplete },
{ context: 'api/server/controllers/agents/request.js - resumable response end' },
);
@@ -648,10 +639,14 @@ const _LegacyAgentController = async (req, res, next, initializeClient, addTitle
conversation.title =
conversation && !conversation.title ? null : conversation?.title || 'New Chat';
- if (req.body.files && Array.isArray(client.options.attachments)) {
- const files = buildMessageFiles(req.body.files, client.options.attachments);
- if (files.length > 0) {
- userMessage.files = files;
+ // Process files if needed (sanitize to remove large text fields before transmission)
+ if (req.body.files && client.options?.attachments) {
+ userMessage.files = [];
+ const messageFiles = new Set(req.body.files.map((file) => file.file_id));
+ for (const attachment of client.options.attachments) {
+ if (messageFiles.has(attachment.file_id)) {
+ userMessage.files.push(sanitizeFileForTransmit(attachment));
+ }
}
delete userMessage.image_urls;
}
@@ -673,11 +668,7 @@ const _LegacyAgentController = async (req, res, next, initializeClient, addTitle
// Save the message if needed
if (client.savedMessageIds && !client.savedMessageIds.has(messageId)) {
await saveMessage(
- {
- userId: req?.user?.id,
- isTemporary: req?.body?.isTemporary,
- interfaceConfig: req?.config?.interfaceConfig,
- },
+ req,
{ ...finalResponse, user: userId },
{ context: 'api/server/controllers/agents/request.js - response end' },
);
@@ -706,15 +697,9 @@ const _LegacyAgentController = async (req, res, next, initializeClient, addTitle
// Save user message if needed
if (!client.skipSaveUserMessage) {
- await saveMessage(
- {
- userId: req?.user?.id,
- isTemporary: req?.body?.isTemporary,
- interfaceConfig: req?.config?.interfaceConfig,
- },
- userMessage,
- { context: "api/server/controllers/agents/request.js - don't skip saving user message" },
- );
+ await saveMessage(req, userMessage, {
+ context: "api/server/controllers/agents/request.js - don't skip saving user message",
+ });
}
// Add title if needed - extract minimal data
diff --git a/api/server/controllers/agents/responses.js b/api/server/controllers/agents/responses.js
index 7abddf5e2f..afdb96be9f 100644
--- a/api/server/controllers/agents/responses.js
+++ b/api/server/controllers/agents/responses.js
@@ -32,13 +32,13 @@ const {
} = require('@librechat/api');
const {
createResponsesToolEndCallback,
- buildSummarizationHandlers,
- markSummarizationUsage,
createToolEndCallback,
- agentLogHandlerObj,
} = require('~/server/controllers/agents/callbacks');
const { loadAgentTools, loadToolsForExecution } = require('~/server/services/ToolService');
const { findAccessibleResources } = require('~/server/services/PermissionService');
+const { getConvoFiles, saveConvo, getConvo } = require('~/models/Conversation');
+const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
+const { getAgent, getAgents } = require('~/models/Agent');
const db = require('~/models');
/** @type {import('@librechat/api').AppConfig | null} */
@@ -213,12 +213,8 @@ async function saveResponseOutput(req, conversationId, responseId, response, age
* @returns {Promise}
*/
async function saveConversation(req, conversationId, agentId, agent) {
- await db.saveConvo(
- {
- userId: req?.user?.id,
- isTemporary: req?.body?.isTemporary,
- interfaceConfig: req?.config?.interfaceConfig,
- },
+ await saveConvo(
+ req,
{
conversationId,
endpoint: EModelEndpoint.agents,
@@ -280,10 +276,9 @@ const createResponse = async (req, res) => {
const request = validation.request;
const agentId = request.model;
const isStreaming = request.stream === true;
- const summarizationConfig = req.config?.summarization;
// Look up the agent
- const agent = await db.getAgent({ id: agentId });
+ const agent = await getAgent({ id: agentId });
if (!agent) {
return sendResponsesErrorResponse(
res,
@@ -296,6 +291,10 @@ const createResponse = async (req, res) => {
// Generate IDs
const responseId = generateResponseId();
+ const conversationId = request.previous_response_id ?? uuidv4();
+ const parentMessageId = null;
+
+ // Create response context
const context = createResponseContext(request, responseId);
logger.debug(
@@ -314,23 +313,6 @@ const createResponse = async (req, res) => {
});
try {
- if (request.previous_response_id != null) {
- if (typeof request.previous_response_id !== 'string') {
- return sendResponsesErrorResponse(
- res,
- 400,
- 'previous_response_id must be a string',
- 'invalid_request',
- );
- }
- if (!(await db.getConvo(req.user?.id, request.previous_response_id))) {
- return sendResponsesErrorResponse(res, 404, 'Conversation not found', 'not_found');
- }
- }
-
- const conversationId = request.previous_response_id ?? uuidv4();
- const parentMessageId = null;
-
// Build allowed providers set
const allowedProviders = new Set(
appConfig?.endpoints?.[EModelEndpoint.agents]?.allowedProviders,
@@ -359,7 +341,7 @@ const createResponse = async (req, res) => {
isInitialAgent: true,
},
{
- getConvoFiles: db.getConvoFiles,
+ getConvoFiles,
getFiles: db.getFiles,
getUserKey: db.getUserKey,
getMessages: db.getMessages,
@@ -391,11 +373,11 @@ const createResponse = async (req, res) => {
const allMessages = [...previousMessages, ...inputMessages];
const toolSet = buildToolSet(primaryConfig);
- const {
- messages: formattedMessages,
- indexTokenCountMap,
- summary: initialSummary,
- } = formatAgentMessages(allMessages, {}, toolSet);
+ const { messages: formattedMessages, indexTokenCountMap } = formatAgentMessages(
+ allMessages,
+ {},
+ toolSet,
+ );
// Create tracker for streaming or aggregator for non-streaming
const tracker = actuallyStreaming ? createResponseTracker() : null;
@@ -446,7 +428,6 @@ const createResponse = async (req, res) => {
toolRegistry: primaryConfig.toolRegistry,
userMCPAuthMap: primaryConfig.userMCPAuthMap,
tool_resources: primaryConfig.tool_resources,
- actionsEnabled: primaryConfig.actionsEnabled,
});
},
toolEndCallback,
@@ -459,12 +440,11 @@ const createResponse = async (req, res) => {
on_run_step: responsesHandlers.on_run_step,
on_run_step_delta: responsesHandlers.on_run_step_delta,
on_chat_model_end: {
- handle: (event, data, metadata) => {
+ handle: (event, data) => {
responsesHandlers.on_chat_model_end.handle(event, data);
const usage = data?.output?.usage_metadata;
if (usage) {
- const taggedUsage = markSummarizationUsage(usage, metadata);
- collectedUsage.push(taggedUsage);
+ collectedUsage.push(usage);
}
},
},
@@ -475,10 +455,6 @@ const createResponse = async (req, res) => {
on_agent_update: { handle: () => {} },
on_custom_event: { handle: () => {} },
on_tool_execute: createToolExecuteHandler(toolExecuteOptions),
- on_agent_log: agentLogHandlerObj,
- ...(summarizationConfig?.enabled !== false
- ? buildSummarizationHandlers({ isStreaming: actuallyStreaming, res })
- : {}),
};
// Create and run the agent
@@ -489,9 +465,7 @@ const createResponse = async (req, res) => {
agents: [primaryConfig],
messages: formattedMessages,
indexTokenCountMap,
- initialSummary,
runId: responseId,
- summarizationConfig,
signal: abortController.signal,
customHandlers: handlers,
requestBody: {
@@ -512,10 +486,6 @@ const createResponse = async (req, res) => {
thread_id: conversationId,
user_id: userId,
user: createSafeUser(req.user),
- requestBody: {
- messageId: responseId,
- conversationId,
- },
...(userMCPAuthMap != null && { userMCPAuthMap }),
},
signal: abortController.signal,
@@ -535,18 +505,12 @@ const createResponse = async (req, res) => {
const balanceConfig = getBalanceConfig(req.config);
const transactionsConfig = getTransactionsConfig(req.config);
recordCollectedUsage(
- {
- spendTokens: db.spendTokens,
- spendStructuredTokens: db.spendStructuredTokens,
- pricing: { getMultiplier: db.getMultiplier, getCacheMultiplier: db.getCacheMultiplier },
- bulkWriteOps: { insertMany: db.bulkInsertTransactions, updateBalance: db.updateBalance },
- },
+ { spendTokens, spendStructuredTokens },
{
user: userId,
conversationId,
collectedUsage,
context: 'message',
- messageId: responseId,
balance: balanceConfig,
transactions: transactionsConfig,
model: primaryConfig.model || agent.model_parameters?.model,
@@ -611,7 +575,6 @@ const createResponse = async (req, res) => {
toolRegistry: primaryConfig.toolRegistry,
userMCPAuthMap: primaryConfig.userMCPAuthMap,
tool_resources: primaryConfig.tool_resources,
- actionsEnabled: primaryConfig.actionsEnabled,
});
},
toolEndCallback,
@@ -623,12 +586,11 @@ const createResponse = async (req, res) => {
on_run_step: aggregatorHandlers.on_run_step,
on_run_step_delta: aggregatorHandlers.on_run_step_delta,
on_chat_model_end: {
- handle: (event, data, metadata) => {
+ handle: (event, data) => {
aggregatorHandlers.on_chat_model_end.handle(event, data);
const usage = data?.output?.usage_metadata;
if (usage) {
- const taggedUsage = markSummarizationUsage(usage, metadata);
- collectedUsage.push(taggedUsage);
+ collectedUsage.push(usage);
}
},
},
@@ -639,10 +601,6 @@ const createResponse = async (req, res) => {
on_agent_update: { handle: () => {} },
on_custom_event: { handle: () => {} },
on_tool_execute: createToolExecuteHandler(toolExecuteOptions),
- on_agent_log: agentLogHandlerObj,
- ...(summarizationConfig?.enabled !== false
- ? buildSummarizationHandlers({ isStreaming: false, res })
- : {}),
};
const userId = req.user?.id ?? 'api-user';
@@ -652,9 +610,7 @@ const createResponse = async (req, res) => {
agents: [primaryConfig],
messages: formattedMessages,
indexTokenCountMap,
- initialSummary,
runId: responseId,
- summarizationConfig,
signal: abortController.signal,
customHandlers: handlers,
requestBody: {
@@ -674,10 +630,6 @@ const createResponse = async (req, res) => {
thread_id: conversationId,
user_id: userId,
user: createSafeUser(req.user),
- requestBody: {
- messageId: responseId,
- conversationId,
- },
...(userMCPAuthMap != null && { userMCPAuthMap }),
},
signal: abortController.signal,
@@ -697,18 +649,12 @@ const createResponse = async (req, res) => {
const balanceConfig = getBalanceConfig(req.config);
const transactionsConfig = getTransactionsConfig(req.config);
recordCollectedUsage(
- {
- spendTokens: db.spendTokens,
- spendStructuredTokens: db.spendStructuredTokens,
- pricing: { getMultiplier: db.getMultiplier, getCacheMultiplier: db.getCacheMultiplier },
- bulkWriteOps: { insertMany: db.bulkInsertTransactions, updateBalance: db.updateBalance },
- },
+ { spendTokens, spendStructuredTokens },
{
user: userId,
conversationId,
collectedUsage,
context: 'message',
- messageId: responseId,
balance: balanceConfig,
transactions: transactionsConfig,
model: primaryConfig.model || agent.model_parameters?.model,
@@ -800,7 +746,7 @@ const listModels = async (req, res) => {
// Get the accessible agents
let agents = [];
if (accessibleAgentIds.length > 0) {
- agents = await db.getAgents({ _id: { $in: accessibleAgentIds } });
+ agents = await getAgents({ _id: { $in: accessibleAgentIds } });
}
// Convert to models format
@@ -850,7 +796,7 @@ const getResponse = async (req, res) => {
// The responseId could be either the response ID or the conversation ID
// Try to find a conversation with this ID
- const conversation = await db.getConvo(userId, responseId);
+ const conversation = await getConvo(userId, responseId);
if (!conversation) {
return sendResponsesErrorResponse(
diff --git a/api/server/controllers/agents/v1.js b/api/server/controllers/agents/v1.js
index e365b232e4..34078b2250 100644
--- a/api/server/controllers/agents/v1.js
+++ b/api/server/controllers/agents/v1.js
@@ -3,11 +3,9 @@ const fs = require('fs').promises;
const { nanoid } = require('nanoid');
const { logger } = require('@librechat/data-schemas');
const {
- refreshS3Url,
agentCreateSchema,
agentUpdateSchema,
refreshListAvatars,
- collectEdgeAgentIds,
mergeAgentOcrConversion,
MAX_AVATAR_REFRESH_AGENTS,
convertOcrToContextInPlace,
@@ -26,22 +24,30 @@ const {
actionDelimiter,
removeNullishValues,
} = require('librechat-data-provider');
+const {
+ getListAgentsByAccess,
+ countPromotedAgents,
+ revertAgentVersion,
+ createAgent,
+ updateAgent,
+ deleteAgent,
+ getAgent,
+} = require('~/models/Agent');
const {
findPubliclyAccessibleResources,
- getResourcePermissionsMap,
findAccessibleResources,
hasPublicPermission,
grantPermission,
} = require('~/server/services/PermissionService');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
+const { getCategoriesWithCounts, deleteFileByFilter } = require('~/models');
const { resizeAvatar } = require('~/server/services/Files/images/avatar');
const { getFileStrategy } = require('~/server/utils/getFileStrategy');
+const { refreshS3Url } = require('~/server/services/Files/S3/crud');
const { filterFile } = require('~/server/services/Files/process');
+const { updateAction, getActions } = require('~/models/Action');
const { getCachedTools } = require('~/server/services/Config');
-const { resolveConfigServers } = require('~/server/services/MCP');
-const { getMCPServersRegistry } = require('~/config');
const { getLogStores } = require('~/cache');
-const db = require('~/models');
const systemTools = {
[Tools.execute_code]: true,
@@ -52,122 +58,6 @@ const systemTools = {
const MAX_SEARCH_LEN = 100;
const escapeRegex = (str = '') => str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
-/**
- * Validates that the requesting user has VIEW access to every agent referenced in edges.
- * Agents that do not exist in the database are skipped — at create time, the `from` field
- * often references the agent being built, which has no DB record yet.
- * @param {import('librechat-data-provider').GraphEdge[]} edges
- * @param {string} userId
- * @param {string} userRole - Used for group/role principal resolution
- * @returns {Promise} Agent IDs the user cannot VIEW (empty if all accessible)
- */
-const validateEdgeAgentAccess = async (edges, userId, userRole) => {
- const edgeAgentIds = collectEdgeAgentIds(edges);
- if (edgeAgentIds.size === 0) {
- return [];
- }
-
- const agents = await db.getAgents({ id: { $in: [...edgeAgentIds] } });
-
- if (agents.length === 0) {
- return [];
- }
-
- const permissionsMap = await getResourcePermissionsMap({
- userId,
- role: userRole,
- resourceType: ResourceType.AGENT,
- resourceIds: agents.map((a) => a._id),
- });
-
- return agents
- .filter((a) => {
- const bits = permissionsMap.get(a._id.toString()) ?? 0;
- return (bits & PermissionBits.VIEW) === 0;
- })
- .map((a) => a.id);
-};
-
-/**
- * Filters tools to only include those the user is authorized to use.
- * MCP tools must match the exact format `{toolName}_mcp_{serverName}` (exactly 2 segments).
- * Multi-delimiter keys are rejected to prevent authorization/execution mismatch.
- * Non-MCP tools must appear in availableTools (global tool cache) or systemTools.
- *
- * When `existingTools` is provided and the MCP registry is unavailable (e.g. server restart),
- * tools already present on the agent are preserved rather than stripped — they were validated
- * when originally added, and we cannot re-verify them without the registry.
- * @param {object} params
- * @param {string[]} params.tools - Raw tool strings from the request
- * @param {string} params.userId - Requesting user ID for MCP server access check
- * @param {Record