Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-28 06:08:50 +01:00)
🏗️ fix: Agents Token Spend Race Conditions, Add Auto-refill Tx, Add Relevant Tests (#6480)
* 🏗️ refactor: Improve spendTokens logic to handle zero completion tokens and enhance test coverage
* 🏗️ test: Add tests to ensure balance does not go below zero when spending tokens
* 🏗️ fix: Ensure proper continuation in AgentClient when handling errors
* fix: Resolve token spend race conditions
* 🏗️ test: Add test for handling multiple concurrent transactions with high balance
* fix: Fix Omni model prompt prefix handling for user messages with array content in OpenAIClient
* refactor: Update checkBalance import paths to use new balanceMethods module
* refactor: Update checkBalance imports and implement updateBalance function for atomic balance updates
* fix: import from replace method
* feat: Add createAutoRefillTransaction method to handle non-balance-updating transactions
* refactor: Move auto-refill logic to balanceMethods and enhance checkBalance functionality
* feat: Implement logging for auto-refill transactions in balance checks
* refactor: Remove logRefill calls from multiple client and handler files
* refactor: Move balance checking and auto-refill logic to balanceMethods for improved structure
* refactor: Simplify balance check calls by removing unnecessary balanceRecord assignments
* fix: Prevent negative rawAmount in spendTokens when promptTokens is zero
* fix: Update balanceMethods to use Balance model for findOneAndUpdate
* chore: Fix import order
* refactor: Remove unused txMethods file to streamline codebase
* feat: Enhance updateBalance and createAutoRefillTransaction methods to support additional parameters for improved balance management
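The core of the race-condition fix described above is replacing read-modify-write balance mutations with a single atomic update through the Balance model. Below is a minimal sketch of that idea, assuming a Mongoose Balance schema with a numeric tokenCredits field; the schema, parameter shape, and clamp-at-zero pipeline are illustrative, not the repository's exact implementation (MongoDB 4.2+ is assumed for the update pipeline).

const mongoose = require('mongoose');

// Illustrative stand-in for the real Balance model in the LibreChat codebase.
const Balance = mongoose.model(
  'Balance',
  new mongoose.Schema({ user: mongoose.Schema.Types.ObjectId, tokenCredits: Number }),
);

/**
 * Apply a credit (positive) or debit (negative) in one findOneAndUpdate call,
 * so two concurrent spends cannot interleave between a read and a write.
 * The update pipeline also clamps the stored balance at zero.
 */
async function updateBalance({ user, incrementValue }) {
  return Balance.findOneAndUpdate(
    { user },
    [
      {
        $set: {
          tokenCredits: {
            $max: [0, { $add: [{ $ifNull: ['$tokenCredits', 0] }, incrementValue] }],
          },
        },
      },
    ],
    { new: true, upsert: true },
  ).lean();
}

With a helper of this shape, spendTokens only has to compute a non-negative token cost and pass it as a negative incrementValue, which is consistent with the "prevent negative rawAmount when promptTokens is zero" item in the message.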
This commit is contained in:
Parent: 5e6a3ec219
Commit: 842b68fc32
13 changed files with 807 additions and 279 deletions
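The auto-refill items in the commit message can be read as: when a balance check fails, record a refill transaction (one that does not itself mutate the balance), apply the credit through the same atomic helper, and log it. The sketch below is hypothetical end to end; the settings fields, the Transaction shape, and the logging are placeholders rather than the actual balanceMethods API.

/**
 * Hypothetical balance check with auto-refill. Dependencies are passed in so the
 * sketch stays self-contained: `Transaction` is any model with a create() method,
 * and `updateBalance` is an atomic increment helper like the one sketched above.
 */
async function checkBalance({ user, amountNeeded, balance, Transaction, updateBalance }) {
  if (balance.tokenCredits >= amountNeeded) {
    return true;
  }

  // Assumed refill settings on the balance record (field names are illustrative).
  const { autoRefillEnabled, refillAmount, refillIntervalMs, lastRefill } = balance;
  const refillDue =
    autoRefillEnabled && Date.now() - new Date(lastRefill).getTime() >= refillIntervalMs;
  if (!refillDue) {
    return false;
  }

  // Record the refill as a transaction that does not itself update the balance,
  // then apply the credit in one atomic step and log it.
  await Transaction.create({ user, type: 'auto-refill', rawAmount: refillAmount });
  const updated = await updateBalance({ user, incrementValue: refillAmount });
  console.log(`[auto-refill] user=${user} credited=${refillAmount} balance=${updated.tokenCredits}`);

  return updated.tokenCredits >= amountNeeded;
}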
@@ -5,6 +5,7 @@ const { SplitStreamHandler, GraphEvents } = require('@librechat/agents');
 const {
   Constants,
   ImageDetail,
+  ContentTypes,
   EModelEndpoint,
   resolveHeaders,
   KnownEndpoints,
@@ -505,8 +506,24 @@ class OpenAIClient extends BaseClient {
     if (promptPrefix && this.isOmni === true) {
       const lastUserMessageIndex = payload.findLastIndex((message) => message.role === 'user');
       if (lastUserMessageIndex !== -1) {
-        payload[lastUserMessageIndex].content =
-          `${promptPrefix}\n${payload[lastUserMessageIndex].content}`;
+        if (Array.isArray(payload[lastUserMessageIndex].content)) {
+          const firstTextPartIndex = payload[lastUserMessageIndex].content.findIndex(
+            (part) => part.type === ContentTypes.TEXT,
+          );
+          if (firstTextPartIndex !== -1) {
+            const firstTextPart = payload[lastUserMessageIndex].content[firstTextPartIndex];
+            payload[lastUserMessageIndex].content[firstTextPartIndex].text =
+              `${promptPrefix}\n${firstTextPart.text}`;
+          } else {
+            payload[lastUserMessageIndex].content.unshift({
+              type: ContentTypes.TEXT,
+              text: promptPrefix,
+            });
+          }
+        } else {
+          payload[lastUserMessageIndex].content =
+            `${promptPrefix}\n${payload[lastUserMessageIndex].content}`;
+        }
       }
     }
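For context on why the Omni prompt-prefix change needs the array branch: with vision/multimodal requests the last user message's content is an array of typed parts rather than a plain string, so the prefix has to be merged into the first text part (or added as a new text part) instead of being string-concatenated. A hypothetical before/after, assuming ContentTypes.TEXT resolves to 'text' and an OpenAI-style image part:

// Last user message before the prefix is applied (multimodal content):
const before = {
  role: 'user',
  content: [
    { type: 'text', text: 'What is in this image?' },
    { type: 'image_url', image_url: { url: 'https://example.com/photo.png' } },
  ],
};

// After the new branch runs with promptPrefix = 'Answer concisely.', the prefix
// is prepended to the first text part and the image part is left untouched:
const after = {
  role: 'user',
  content: [
    { type: 'text', text: 'Answer concisely.\nWhat is in this image?' },
    { type: 'image_url', image_url: { url: 'https://example.com/photo.png' } },
  ],
};

// If the content array contained no text part at all, a new one would be
// unshifted to the front: [{ type: 'text', text: 'Answer concisely.' }, ...imageParts]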