mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-21 19:00:13 +01:00
refactor(api): Central Logging 📜 (#1348)
* WIP: initial logging changes
  - add several transports in ~/config/winston
  - omit messages in logs, truncate long strings
  - add short blurb in dotenv for debug logging
  - GoogleClient: using logger
  - OpenAIClient: using logger, handleOpenAIErrors
  - Adding typedef for payload message
  - bumped winston and using winston-daily-rotate-file
  - moved config for server paths to ~/config dir
  - Added `DEBUG_LOGGING=true` to .env.example
* WIP: Refactor logging statements in code
* WIP: Refactor logging statements and import configurations
* WIP: Refactor logging statements and import configurations
* refactor: broadcast Redis initialization message with `info` not `debug`
* refactor: complete Refactor logging statements and import configurations
* chore: delete unused tools
* fix: circular dependencies due to accessing logger
* refactor(handleText): handle booleans and write tests
* refactor: redact sensitive values, better formatting
* chore: improve log formatting, avoid passing strings to 2nd arg
* fix(ci): fix jest tests due to logger changes
* refactor(getAvailablePluginsController): cache plugins as they are static and avoids async addOpenAPISpecs call every time
* chore: update docs
* chore: update docs
* chore: create separate meiliSync logger, clean up logs to avoid being unnecessarily verbose
* chore: spread objects where they are commonly logged to allow string truncation
* chore: improve error log formatting
This commit is contained in:
parent
49571ac635
commit
ea1dd59ef4
115 changed files with 1271 additions and 1001 deletions
|
|
@ -1,5 +1,6 @@
|
|||
const Keyv = require('keyv');
|
||||
const { KeyvFile } = require('keyv-file');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const addToCache = async ({ endpoint, endpointOption, userMessage, responseMessage }) => {
|
||||
try {
|
||||
|
|
@ -57,7 +58,7 @@ const addToCache = async ({ endpoint, endpointOption, userMessage, responseMessa
|
|||
|
||||
await conversationsCache.set(conversationId, conversation);
|
||||
} catch (error) {
|
||||
console.error('Trouble adding to cache', error);
|
||||
logger.error('[addToCache] Error adding conversation to cache', error);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -1,10 +1,12 @@
|
|||
const express = require('express');
|
||||
const crypto = require('crypto');
|
||||
const express = require('express');
|
||||
const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('~/models');
|
||||
const { handleError, sendMessage, createOnProgress, handleText } = require('~/server/utils');
|
||||
const { setHeaders } = require('~/server/middleware');
|
||||
const { browserClient } = require('~/app/');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const router = express.Router();
|
||||
const { browserClient } = require('../../../app/');
|
||||
const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models');
|
||||
const { handleError, sendMessage, createOnProgress, handleText } = require('../../utils');
|
||||
const { setHeaders } = require('../../middleware');
|
||||
|
||||
router.post('/', setHeaders, async (req, res) => {
|
||||
const {
|
||||
|
|
@ -41,10 +43,10 @@ router.post('/', setHeaders, async (req, res) => {
|
|||
key: req.body?.key ?? null,
|
||||
};
|
||||
|
||||
console.log('ask log', {
|
||||
logger.debug('[/ask/chatGPTBrowser]', {
|
||||
userMessage,
|
||||
endpointOption,
|
||||
conversationId,
|
||||
...endpointOption,
|
||||
});
|
||||
|
||||
if (!overrideParentMessageId) {
|
||||
|
|
@ -136,7 +138,7 @@ const ask = async ({
|
|||
},
|
||||
});
|
||||
|
||||
console.log('CLIENT RESPONSE', response);
|
||||
logger.debug('[/ask/chatGPTBrowser]', response);
|
||||
|
||||
const newConversationId = response.conversationId || conversationId;
|
||||
const newUserMassageId = response.parentMessageId || userMessageId;
|
||||
|
|
|
|||
|
|
@ -1,10 +1,12 @@
|
|||
const express = require('express');
|
||||
const crypto = require('crypto');
|
||||
const { handleError, sendMessage, createOnProgress, handleText } = require('~/server/utils');
|
||||
const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('~/models');
|
||||
const { setHeaders } = require('~/server/middleware');
|
||||
const { titleConvoBing, askBing } = require('~/app');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const router = express.Router();
|
||||
const { titleConvoBing, askBing } = require('../../../app');
|
||||
const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models');
|
||||
const { handleError, sendMessage, createOnProgress, handleText } = require('../../utils');
|
||||
const { setHeaders } = require('../../middleware');
|
||||
|
||||
router.post('/', setHeaders, async (req, res) => {
|
||||
const {
|
||||
|
|
@ -60,7 +62,7 @@ router.post('/', setHeaders, async (req, res) => {
|
|||
};
|
||||
}
|
||||
|
||||
console.log('ask log', {
|
||||
logger.debug('[/ask/bingAI] ask log', {
|
||||
userMessage,
|
||||
endpointOption,
|
||||
conversationId,
|
||||
|
|
@ -153,10 +155,10 @@ const ask = async ({
|
|||
abortController,
|
||||
});
|
||||
|
||||
console.log('BING RESPONSE', response);
|
||||
logger.debug('[/ask/bingAI] BING RESPONSE', response);
|
||||
|
||||
if (response.details && response.details.scores) {
|
||||
console.log('SCORES', response.details.scores);
|
||||
logger.debug('[/ask/bingAI] SCORES', response.details.scores);
|
||||
}
|
||||
|
||||
const newConversationId = endpointOption?.jailbreak
|
||||
|
|
@ -250,7 +252,7 @@ const ask = async ({
|
|||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
logger.error('[/ask/bingAI] Error handling BingAI response', error);
|
||||
const partialText = getPartialText();
|
||||
if (partialText?.length > 2) {
|
||||
const responseMessage = {
|
||||
|
|
@ -276,7 +278,7 @@ const ask = async ({
|
|||
responseMessage: responseMessage,
|
||||
};
|
||||
} else {
|
||||
console.log(error);
|
||||
logger.error('[/ask/bingAI] Error handling BingAI response', error);
|
||||
const errorMessage = {
|
||||
messageId: responseMessageId,
|
||||
sender: model,
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ const {
|
|||
validateEndpoint,
|
||||
buildEndpointOption,
|
||||
} = require('~/server/middleware');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
router.post('/abort', handleAbort());
|
||||
|
||||
|
|
@ -25,8 +26,7 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
|
|||
parentMessageId = null,
|
||||
overrideParentMessageId = null,
|
||||
} = req.body;
|
||||
console.log('ask log');
|
||||
console.dir({ text, conversationId, endpointOption }, { depth: null });
|
||||
logger.debug('[/ask/gptPlugins]', { text, conversationId, ...endpointOption });
|
||||
let metadata;
|
||||
let userMessage;
|
||||
let promptTokens;
|
||||
|
|
@ -189,8 +189,8 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
|
|||
response = { ...response, ...metadata };
|
||||
}
|
||||
|
||||
console.log('CLIENT RESPONSE');
|
||||
console.dir(response, { depth: null });
|
||||
logger.debug('[/ask/gptPlugins]', response);
|
||||
|
||||
response.plugins = plugins.map((p) => ({ ...p, loading: false }));
|
||||
await saveMessage({ ...response, user });
|
||||
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ const {
|
|||
validateEndpoint,
|
||||
buildEndpointOption,
|
||||
} = require('~/server/middleware');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
router.post('/abort', handleAbort());
|
||||
|
||||
|
|
@ -23,8 +24,9 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
|
|||
parentMessageId = null,
|
||||
overrideParentMessageId = null,
|
||||
} = req.body;
|
||||
console.log('ask log');
|
||||
console.dir({ text, conversationId, endpointOption }, { depth: null });
|
||||
|
||||
logger.debug('[/ask/openAI]', { text, conversationId, ...endpointOption });
|
||||
|
||||
let metadata;
|
||||
let userMessage;
|
||||
let promptTokens;
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue