fix: Avoid Throwing Errors for Unsupported Token Count Endpoints 🪙 (#1356)

This commit is contained in:
Danny Avila 2023-12-15 02:40:15 -05:00 committed by GitHub
parent 561ce8e86a
commit ff59a2e41d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3 changed files with 12 additions and 4 deletions

View file

@@ -1,5 +1,5 @@
const { promptTokensEstimate } = require('openai-chat-tokens');
const { EModelEndpoint } = require('librechat-data-provider');
const { EModelEndpoint, supportsBalanceCheck } = require('librechat-data-provider');
const { formatFromLangChain } = require('~/app/clients/prompts');
const checkBalance = require('~/models/checkBalance');
const { isEnabled } = require('~/server/utils');
@@ -49,7 +49,8 @@ const createStartHandler = ({
prelimPromptTokens += tokenBuffer;
try {
if (isEnabled(process.env.CHECK_BALANCE)) {
// TODO: if plugins extends to non-OpenAI models, this will need to be updated
if (isEnabled(process.env.CHECK_BALANCE) && supportsBalanceCheck[EModelEndpoint.openAI]) {
const generations =
initialMessageCount && messages.length > initialMessageCount
? messages.slice(initialMessageCount)