refactor: move tokens.js over to packages/api and update imports

Dustin Healy authored on 2025-08-30 15:25:13 -07:00, committed by Danny Avila
parent efdad28b70
commit d1d4c2eb27
No known key found for this signature in database (GPG key ID: BF31EEB2C5CA0956)
17 changed files with 39 additions and 34 deletions
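
The diffs below retarget imports of the token helpers (getModelMaxTokens, matchModelName, inputSchema, processModelData) from the app-local ~/utils barrel to the shared @librechat/api package. A minimal before/after sketch of the consumer-side change; the model name and endpoint arguments are placeholders for illustration, not taken from this diff:

// before: helper resolved from the app-local utils barrel
// const { getModelMaxTokens } = require('~/utils');

// after: the same helper is imported from the workspace package
const { getModelMaxTokens } = require('@librechat/api');

// call sites are unchanged; only the require path moves
// (model name and endpoint here are illustrative placeholders)
const maxTokens = getModelMaxTokens('gpt-4o', 'openAI');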


@@ -1,7 +1,7 @@
 const { v4 } = require('uuid');
 const { sleep } = require('@librechat/agents');
 const { logger } = require('@librechat/data-schemas');
-const { sendEvent, getBalanceConfig } = require('@librechat/api');
+const { sendEvent, getBalanceConfig, getModelMaxTokens } = require('@librechat/api');
 const {
   Time,
   Constants,
@@ -34,7 +34,6 @@ const { checkBalance } = require('~/models/balanceMethods');
 const { getConvo } = require('~/models/Conversation');
 const getLogStores = require('~/cache/getLogStores');
 const { countTokens } = require('~/server/utils');
-const { getModelMaxTokens } = require('~/utils');
 const { getOpenAIClient } = require('./helpers');
 
 /**


@@ -1,7 +1,7 @@
 const { v4 } = require('uuid');
 const { sleep } = require('@librechat/agents');
 const { logger } = require('@librechat/data-schemas');
-const { sendEvent, getBalanceConfig } = require('@librechat/api');
+const { sendEvent, getBalanceConfig, getModelMaxTokens } = require('@librechat/api');
 const {
   Time,
   Constants,
@@ -31,7 +31,6 @@ const { checkBalance } = require('~/models/balanceMethods');
 const { getConvo } = require('~/models/Conversation');
 const getLogStores = require('~/cache/getLogStores');
 const { countTokens } = require('~/server/utils');
-const { getModelMaxTokens } = require('~/utils');
 const { getOpenAIClient } = require('./helpers');
 
 /**


@@ -1,6 +1,7 @@
 const { Providers } = require('@librechat/agents');
 const {
   primeResources,
+  getModelMaxTokens,
   extractLibreChatParams,
   optionalChainWithEmptyCheck,
 } = require('@librechat/api');
@@ -17,7 +18,6 @@ const { getProviderConfig } = require('~/server/services/Endpoints');
 const { processFiles } = require('~/server/services/Files/process');
 const { getFiles, getToolFilesByIds } = require('~/models/File');
 const { getConvoFiles } = require('~/models/Conversation');
-const { getModelMaxTokens } = require('~/utils');
 
 /**
  * @param {object} params


@@ -1,5 +1,5 @@
+const { matchModelName } = require('@librechat/api');
 const { EModelEndpoint, anthropicSettings } = require('librechat-data-provider');
-const { matchModelName } = require('~/utils');
 const { logger } = require('~/config');
 
 /**


@@ -1,3 +1,4 @@
+const { getModelMaxTokens } = require('@librechat/api');
 const { createContentAggregator } = require('@librechat/agents');
 const {
   EModelEndpoint,
@@ -7,7 +8,6 @@ const {
 const { getDefaultHandlers } = require('~/server/controllers/agents/callbacks');
 const getOptions = require('~/server/services/Endpoints/bedrock/options');
 const AgentClient = require('~/server/controllers/agents/client');
-const { getModelMaxTokens } = require('~/utils');
 
 const initializeClient = async ({ req, res, endpointOption }) => {
   if (!endpointOption) {


@@ -1,13 +1,13 @@
 const axios = require('axios');
 const { Providers } = require('@librechat/agents');
-const { logAxiosError } = require('@librechat/api');
 const { logger } = require('@librechat/data-schemas');
 const { HttpsProxyAgent } = require('https-proxy-agent');
+const { logAxiosError, inputSchema, processModelData } = require('@librechat/api');
 const { EModelEndpoint, defaultModels, CacheKeys } = require('librechat-data-provider');
-const { inputSchema, extractBaseURL, processModelData } = require('~/utils');
 const { OllamaClient } = require('~/app/clients/OllamaClient');
 const { isUserProvided } = require('~/server/utils');
 const getLogStores = require('~/cache/getLogStores');
+const { extractBaseURL } = require('~/utils');
 
 /**
  * Splits a string by commas and trims each resulting value.


@@ -11,8 +11,8 @@ const {
   getAnthropicModels,
 } = require('./ModelService');
 
-jest.mock('~/utils', () => {
-  const originalUtils = jest.requireActual('~/utils');
+jest.mock('@librechat/api', () => {
+  const originalUtils = jest.requireActual('@librechat/api');
   return {
     ...originalUtils,
     processModelData: jest.fn((...args) => {
@@ -108,7 +108,7 @@ describe('fetchModels with createTokenConfig true', () => {
   beforeEach(() => {
     // Clears the mock's history before each test
-    const _utils = require('~/utils');
+    const _utils = require('@librechat/api');
     axios.get.mockResolvedValue({ data });
   });
@@ -120,7 +120,7 @@
       createTokenConfig: true,
     });
 
-    const { processModelData } = require('~/utils');
+    const { processModelData } = require('@librechat/api');
     expect(processModelData).toHaveBeenCalled();
     expect(processModelData).toHaveBeenCalledWith(data);
   });