fix: Avoid Throwing Errors for Unsupported Token Count Endpoints 🪙 (#1356)

This commit is contained in:
Danny Avila 2023-12-15 02:40:15 -05:00 committed by GitHub
parent 561ce8e86a
commit ff59a2e41d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3 changed files with 12 additions and 4 deletions

View file

@@ -1,8 +1,9 @@
const crypto = require('crypto');
const TextStream = require('./TextStream');
const { supportsBalanceCheck } = require('librechat-data-provider');
const { getConvo, getMessages, saveMessage, updateMessage, saveConvo } = require('~/models');
const { addSpaceIfNeeded, isEnabled } = require('~/server/utils');
const checkBalance = require('~/models/checkBalance');
const TextStream = require('./TextStream');
const { logger } = require('~/config');
class BaseClient {
@@ -424,7 +425,7 @@ class BaseClient {
await this.saveMessageToDatabase(userMessage, saveOptions, user);
}
if (isEnabled(process.env.CHECK_BALANCE)) {
if (isEnabled(process.env.CHECK_BALANCE) && supportsBalanceCheck[this.options.endpoint]) {
await checkBalance({
req: this.options.req,
res: this.options.res,

View file

@@ -1,5 +1,5 @@
const { promptTokensEstimate } = require('openai-chat-tokens');
const { EModelEndpoint } = require('librechat-data-provider');
const { EModelEndpoint, supportsBalanceCheck } = require('librechat-data-provider');
const { formatFromLangChain } = require('~/app/clients/prompts');
const checkBalance = require('~/models/checkBalance');
const { isEnabled } = require('~/server/utils');
@@ -49,7 +49,8 @@ const createStartHandler = ({
prelimPromptTokens += tokenBuffer;
try {
if (isEnabled(process.env.CHECK_BALANCE)) {
// TODO: if plugins extends to non-OpenAI models, this will need to be updated
if (isEnabled(process.env.CHECK_BALANCE) && supportsBalanceCheck[EModelEndpoint.openAI]) {
const generations =
initialMessageCount && messages.length > initialMessageCount
? messages.slice(initialMessageCount)

View file

@@ -138,6 +138,12 @@ export const supportsFiles = {
[EModelEndpoint.assistant]: true,
};
/**
 * Lookup of endpoints for which token-balance checking is supported.
 * Consumers index it as `supportsBalanceCheck[endpoint]`; endpoints absent
 * from this map yield `undefined` (falsy), so balance checks are skipped.
 */
export const supportsBalanceCheck = Object.fromEntries(
  [EModelEndpoint.openAI, EModelEndpoint.azureOpenAI, EModelEndpoint.gptPlugins].map(
    (endpoint) => [endpoint, true],
  ),
);

/** Model names treated as vision-capable. */
export const visionModels = ['gpt-4-vision', 'llava-13b'];

/** Zod schema validating that a value is a member of the EModelEndpoint enum. */
export const eModelEndpointSchema = z.nativeEnum(EModelEndpoint);