🪙 feat: Add messageId to Transactions (#11987)
Some checks are pending
Docker Dev Branch Images Build / build (Dockerfile, lc-dev, node) (push) Waiting to run
Docker Dev Branch Images Build / build (Dockerfile.multi, lc-dev-api, api-build) (push) Waiting to run

* feat: Add messageId to transactions

* chore: field order

* feat: Enhance token usage tracking by adding messageId parameter

- Updated `recordTokenUsage` method in BaseClient to accept a new `messageId` parameter for improved tracking.
- Propagated `messageId` in the AgentClient when recording usage.
- Added tests to ensure `messageId` is correctly passed and handled in various scenarios, including propagation across multiple usage entries.

* chore: Correct field order in createGeminiImageTool function

- Moved the conversationId field to its correct position in the object passed to the recordTokenUsage method, so the fields are supplied in the order the method expects.

* refactor: Update OpenAIChatCompletionController and createResponse to use responseId instead of requestId

- Replaced instances of requestId with responseId in the OpenAIChatCompletionController for improved clarity in logging and tracking.
- Updated createResponse to include responseId in the requestBody, ensuring consistency across the handling of message identifiers.

* test: Add messageId to agent client tests

- Included messageId in the agent client tests to ensure proper handling and propagation of message identifiers during transaction recording.
- This extends test coverage for messageId handling, matching the recent changes to how message identifiers are recorded in transactions.

* fix: Update OpenAIChatCompletionController to use requestId for context

- Changed the context object in OpenAIChatCompletionController to use `requestId` instead of `responseId` for improved clarity and consistency in handling request identifiers.

* chore: field order
This commit is contained in:
Danny Avila 2026-02-27 23:50:13 -05:00 committed by GitHub
parent 6169d4f70b
commit 8b159079f5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 149 additions and 13 deletions

View file

@ -379,6 +379,7 @@ describe('recordCollectedUsage', () => {
await recordCollectedUsage(deps, {
...baseParams,
messageId: 'msg-123',
endpointTokenConfig,
collectedUsage,
});
@ -389,6 +390,7 @@ describe('recordCollectedUsage', () => {
conversationId: 'convo-123',
model: 'gpt-4',
context: 'message',
messageId: 'msg-123',
balance: { enabled: true },
transactions: { enabled: true },
endpointTokenConfig,
@ -431,4 +433,93 @@ describe('recordCollectedUsage', () => {
);
});
});
// Verifies that the messageId passed to recordCollectedUsage is forwarded to the
// token-spending layer (spendTokens / spendStructuredTokens) in every code path.
// NOTE(review): mockSpendTokens, mockSpendStructuredTokens, deps, and baseParams
// are defined earlier in this test file (outside this hunk) — presumably jest mocks
// reset in a beforeEach; verify against the surrounding setup.
describe('messageId propagation', () => {
// Plain (non-cache) usage goes through spendTokens; messageId must ride along.
it('should pass messageId to spendTokens', async () => {
const collectedUsage: UsageMetadata[] = [
{ input_tokens: 10, output_tokens: 5, model: 'gpt-4' },
];
await recordCollectedUsage(deps, {
...baseParams,
messageId: 'msg-1',
collectedUsage,
});
expect(mockSpendTokens).toHaveBeenCalledWith(
// Only the messageId field is pinned; other txMetadata fields are not asserted here.
expect.objectContaining({ messageId: 'msg-1' }),
expect.any(Object),
);
});
// Usage entries with cache token counts take the structured-spend path instead;
// messageId must reach spendStructuredTokens, and spendTokens must NOT be called.
it('should pass messageId to spendStructuredTokens for cache paths', async () => {
const collectedUsage: UsageMetadata[] = [
{
input_tokens: 100,
output_tokens: 50,
model: 'claude-3',
// Presence of cache_* fields is what routes this entry to the structured path.
cache_creation_input_tokens: 25,
cache_read_input_tokens: 15,
},
];
await recordCollectedUsage(deps, {
...baseParams,
messageId: 'msg-cache-1',
collectedUsage,
});
expect(mockSpendStructuredTokens).toHaveBeenCalledWith(
expect.objectContaining({ messageId: 'msg-cache-1' }),
expect.any(Object),
);
// Cache-bearing usage must be exclusive: the plain path is skipped entirely.
expect(mockSpendTokens).not.toHaveBeenCalled();
});
// messageId is optional on RecordUsageParams; omitting it must yield an explicit
// undefined in the tx metadata rather than being dropped or defaulted.
it('should pass undefined messageId when not provided', async () => {
const collectedUsage: UsageMetadata[] = [
{ input_tokens: 10, output_tokens: 5, model: 'gpt-4' },
];
await recordCollectedUsage(deps, {
// Intentionally not spreading baseParams: build params without messageId.
user: 'user-123',
conversationId: 'convo-123',
collectedUsage,
});
expect(mockSpendTokens).toHaveBeenCalledWith(
expect.objectContaining({ messageId: undefined }),
expect.any(Object),
);
});
// A single call with multiple usage entries (mixed plain + cache) must stamp the
// same messageId on every resulting spend call, across both spend paths.
it('should propagate messageId across multiple usage entries', async () => {
const collectedUsage: UsageMetadata[] = [
{ input_tokens: 100, output_tokens: 50, model: 'gpt-4' },
{ input_tokens: 200, output_tokens: 60, model: 'gpt-4' },
{
input_tokens: 150,
output_tokens: 30,
model: 'gpt-4',
// input_token_details with cache fields routes this third entry to the structured path.
input_token_details: { cache_creation: 10, cache_read: 5 },
},
];
await recordCollectedUsage(deps, {
...baseParams,
messageId: 'msg-multi',
collectedUsage,
});
// Two plain entries → two spendTokens calls; one cache entry → one structured call.
expect(mockSpendTokens).toHaveBeenCalledTimes(2);
expect(mockSpendStructuredTokens).toHaveBeenCalledTimes(1);
for (const call of mockSpendTokens.mock.calls) {
expect(call[0]).toEqual(expect.objectContaining({ messageId: 'msg-multi' }));
}
expect(mockSpendStructuredTokens.mock.calls[0][0]).toEqual(
expect.objectContaining({ messageId: 'msg-multi' }),
);
});
});
});

View file

@ -23,6 +23,7 @@ interface TxMetadata {
user: string;
model?: string;
context: string;
messageId?: string;
conversationId: string;
balance?: Partial<TCustomConfig['balance']> | null;
transactions?: Partial<TTransactionsConfig>;
@ -46,6 +47,7 @@ export interface RecordUsageParams {
collectedUsage: UsageMetadata[];
model?: string;
context?: string;
messageId?: string;
balance?: Partial<TCustomConfig['balance']> | null;
transactions?: Partial<TTransactionsConfig>;
endpointTokenConfig?: EndpointTokenConfig;
@ -68,6 +70,7 @@ export async function recordCollectedUsage(
user,
model,
balance,
messageId,
transactions,
conversationId,
collectedUsage,
@ -108,11 +111,12 @@ export async function recordCollectedUsage(
total_output_tokens += Number(usage.output_tokens) || 0;
const txMetadata: TxMetadata = {
user,
context,
balance,
messageId,
transactions,
conversationId,
user,
endpointTokenConfig,
model: usage.model ?? model,
};