Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-12-22 19:30:15 +01:00
refactor: add back getTokenCountForResponse for slightly more accurate mapping of responses token counts (#1067)
parent 6d8aed7ef8
commit 377f2c7c19
3 changed files with 26 additions and 6 deletions
@@ -230,13 +230,15 @@ If your reverse proxy is compatible to OpenAI specs in every other way, it may s
     console.debug('[handleResponseMessage] Output:', { output, errorMessage, ...result });
     const { error } = responseMessage;
     if (!error) {
-      responseMessage.tokenCount = this.getTokenCount(responseMessage.text);
-      responseMessage.completionTokens = responseMessage.tokenCount;
+      responseMessage.tokenCount = this.getTokenCountForResponse(responseMessage);
+      responseMessage.completionTokens = this.getTokenCount(responseMessage.text);
     }

     // Record usage only when completion is skipped as it is already recorded in the agent phase.
     if (!this.agentOptions.skipCompletion && !error) {
       await this.recordTokenUsage(responseMessage);
     }

     await this.saveMessageToDatabase(responseMessage, saveOptions, user);
     delete responseMessage.tokenCount;
     return { ...responseMessage, ...result };
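
For readers of the diff, the intent of the swap can be sketched as follows. This is a minimal illustration, not code from this repository: the stand-in tokenizer, the TOKENS_PER_MESSAGE overhead value, and the free-standing function shapes are all assumptions. What is taken from the commit itself is only that tokenCount is now computed from the whole response message while completionTokens is computed from the bare text; how getTokenCountForResponse arrives at its number is assumed here to follow the usual chat-message accounting (per-message overhead plus role and content tokens).

// Illustrative sketch only — not LibreChat's implementation.
// Stand-in tokenizer: a real client would use a proper BPE tokenizer.
const tokenizer = { encode: (text) => text.match(/\S+|\s/g) ?? [] };

// Assumed per-message overhead that chat-formatted payloads incur.
const TOKENS_PER_MESSAGE = 3;

// Counts only the raw reply text (what the diff keeps for completionTokens).
function getTokenCount(text) {
  return tokenizer.encode(text ?? '').length;
}

// Counts the reply as a structured chat message (what tokenCount now uses):
// role and content are both tokenized, plus the assumed per-message overhead.
function getTokenCountForResponse(responseMessage) {
  const message = { role: 'assistant', content: responseMessage.text ?? '' };
  let count = TOKENS_PER_MESSAGE;
  for (const value of Object.values(message)) {
    count += tokenizer.encode(value).length;
  }
  return count;
}

// Example: the two counts differ by the structural overhead.
const responseMessage = { text: 'The answer is 42.' };
console.log(getTokenCount(responseMessage.text));       // text-only count
console.log(getTokenCountForResponse(responseMessage)); // message-level count

Under that reading, responseMessage.tokenCount now carries the message-level count while completionTokens keeps the plain text count, which is what the commit title describes as a slightly more accurate mapping of response token counts.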