Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-09-22 08:12:00 +02:00)
fix: Avoid Throwing Errors for Unsupported Token Count Endpoints 🪙 (#1356)
This commit is contained in:
parent 561ce8e86a
commit ff59a2e41d
3 changed files with 12 additions and 4 deletions
@@ -1,5 +1,5 @@
 const { promptTokensEstimate } = require('openai-chat-tokens');
-const { EModelEndpoint } = require('librechat-data-provider');
+const { EModelEndpoint, supportsBalanceCheck } = require('librechat-data-provider');
 const { formatFromLangChain } = require('~/app/clients/prompts');
 const checkBalance = require('~/models/checkBalance');
 const { isEnabled } = require('~/server/utils');

@@ -49,7 +49,8 @@ const createStartHandler = ({
     prelimPromptTokens += tokenBuffer;

     try {
-      if (isEnabled(process.env.CHECK_BALANCE)) {
+      // TODO: if plugins extends to non-OpenAI models, this will need to be updated
+      if (isEnabled(process.env.CHECK_BALANCE) && supportsBalanceCheck[EModelEndpoint.openAI]) {
         const generations =
           initialMessageCount && messages.length > initialMessageCount
             ? messages.slice(initialMessageCount)