Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-12-17 08:50:15 +01:00
* wip: initial cache control implementation, add typing for transactions handling
* feat: first pass of Anthropic Prompt Caching
* feat: standardize stream usage as pass in when calculating token counts
* feat: Add getCacheMultiplier function to calculate cache multiplier for different valueKeys and cacheTypes
* chore: imports order
* refactor: token usage recording in AnthropicClient, no need to "correct" as we have the correct amount
* feat: more accurate token counting using stream usage data
* feat: Improve token counting accuracy with stream usage data
* refactor: ensure more accurate than not token estimations if custom instructions or files are not being resent with every request
* refactor: cleanup updateUserMessageTokenCount to allow transactions to be as accurate as possible even if we shouldn't update user message token counts
* ci: fix tests
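For context on the cache-control commits above: Anthropic's prompt caching works by tagging message content blocks with a `cache_control` breakpoint of type `ephemeral`, so that a previously processed prefix can be reused on subsequent requests. Below is a minimal, hypothetical sketch of that idea; the actual `addCacheControl` module re-exported in the file below may place breakpoints differently.

```js
/**
 * Hypothetical sketch (not the project's actual implementation):
 * mark the most recent messages with Anthropic's `cache_control`
 * breakpoint so their prefix can be served from the prompt cache.
 */
function addCacheControlSketch(messages, breakpoints = 2) {
  let remaining = breakpoints;
  // Walk backwards so the most recent messages receive the breakpoints.
  for (let i = messages.length - 1; i >= 0 && remaining > 0; i--) {
    const message = messages[i];
    if (typeof message.content === 'string') {
      // Anthropic expects cache_control on content blocks, not on the
      // message itself, so promote string content to a block array first.
      message.content = [
        { type: 'text', text: message.content, cache_control: { type: 'ephemeral' } },
      ];
      remaining--;
    } else if (Array.isArray(message.content) && message.content.length > 0) {
      message.content[message.content.length - 1].cache_control = { type: 'ephemeral' };
      remaining--;
    }
  }
  return messages;
}

module.exports = addCacheControlSketch;
```

The `getCacheMultiplier` commit presumably relates to billing: Anthropic prices cache writes and cache reads at different multiples of the base input-token rate, so transactions need a per-cache-type multiplier when token spend is recorded.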
21 lines · 683 B · JavaScript
const addCacheControl = require('./addCacheControl');
const formatMessages = require('./formatMessages');
const summaryPrompts = require('./summaryPrompts');
const handleInputs = require('./handleInputs');
const instructions = require('./instructions');
const titlePrompts = require('./titlePrompts');
const truncateText = require('./truncateText');
const createVisionPrompt = require('./createVisionPrompt');
const createContextHandlers = require('./createContextHandlers');

module.exports = {
  addCacheControl,
  ...formatMessages,
  ...summaryPrompts,
  ...handleInputs,
  ...instructions,
  ...titlePrompts,
  ...truncateText,
  createVisionPrompt,
  createContextHandlers,
};