refactor(api): Central Logging 📜 (#1348)

* WIP: initial logging changes (transport setup sketched below)
  - add several transports in ~/config/winston
  - omit message content from logs; truncate long strings
  - add a short blurb about debug logging to the dotenv docs
  - GoogleClient: use logger
  - OpenAIClient: use logger and handleOpenAIErrors
  - add typedef for the payload message
  - bump winston; use winston-daily-rotate-file
  - move server paths config to the ~/config dir
  - add `DEBUG_LOGGING=true` to .env.example
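
A minimal sketch of what the rotating-file setup in `~/config/winston` might look like, gated by the new `DEBUG_LOGGING` flag; file names, formats, and retention here are assumptions for illustration, not the PR's exact code:

```js
const winston = require('winston');
// Registers winston.transports.DailyRotateFile as a side effect
require('winston-daily-rotate-file');

const useDebugLogging = process.env.DEBUG_LOGGING === 'true';

const logger = winston.createLogger({
  // DEBUG_LOGGING=true lowers the threshold so debug statements are emitted
  level: useDebugLogging ? 'debug' : 'info',
  format: winston.format.combine(winston.format.timestamp(), winston.format.json()),
  transports: [
    new winston.transports.Console(),
    // One log file per day, pruned after two weeks (values assumed)
    new winston.transports.DailyRotateFile({
      filename: 'logs/debug-%DATE%.log',
      datePattern: 'YYYY-MM-DD',
      maxFiles: '14d',
    }),
  ],
});

module.exports = logger;
```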

* WIP: Refactor logging statements in code

* WIP: Refactor logging statements and import configurations

* refactor: broadcast Redis initialization message with `info` not `debug`

* refactor: complete the refactor of logging statements and import configurations

* chore: delete unused tools

* fix: circular dependencies due to accessing logger
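
The cycle fix itself isn't in the excerpt below; as an illustration only, this is a common shape of the problem and one way to break it (module layout beyond `~/config/winston` is assumed):

```js
// Before: ~/config/index.js pulls in modules that require ~/config back,
// so `logger` can be undefined when read mid-cycle:
// const { logger } = require('~/config');

// After: require the logger from its leaf module, which depends on nothing
// that depends on it, so the require cycle never forms.
const logger = require('~/config/winston');
```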

* refactor(handleText): handle booleans and write tests
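
The `handleText` change isn't part of the diff below; a hypothetical sketch of the boolean handling with a matching jest test (the function body is illustrative, not the real implementation):

```js
// Coerce non-string primitives before any string operations
function handleText(input) {
  if (typeof input === 'boolean') {
    return input.toString(); // 'true' / 'false' instead of throwing on .trim()
  }
  return String(input ?? '').trim();
}

test('handleText stringifies booleans', () => {
  expect(handleText(true)).toBe('true');
  expect(handleText(false)).toBe('false');
});
```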

* refactor: redact sensitive values, better formatting
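
Redaction in winston is typically a custom format that rewrites fields before any transport sees them; a minimal sketch, with the key list and placeholder assumed:

```js
const { format } = require('winston');

const SENSITIVE_KEYS = ['apiKey', 'token', 'password']; // assumed key list

// Overwrite sensitive top-level fields on the log info object
const redact = format((info) => {
  for (const key of SENSITIVE_KEYS) {
    if (info[key] !== undefined) {
      info[key] = '[REDACTED]';
    }
  }
  return info;
});

// Usage: winston.format.combine(redact(), winston.format.json())
```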

* chore: improve log formatting; avoid passing strings as the logger's 2nd argument
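
The reason to avoid a bare string as the second argument: winston treats it as metadata rather than appending it to the message, so depending on the configured formats it can be dropped or merged unpredictably. For example:

```js
// Preferred: structured metadata the formatters can inspect and truncate
logger.debug('[OpenAIClient] chat completion', { model: 'gpt-3.5-turbo', status: 200 });

// Avoid: a bare string 2nd arg is handled as meta, not message text
// logger.debug('[OpenAIClient] chat completion', responseText);
```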

* fix(ci): fix jest tests due to logger changes
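
Unit tests that import modules using the shared logger generally need it stubbed so the rotating-file transports never touch the filesystem; a sketch of that kind of jest mock (the exact export shape of `~/config` is an assumption):

```js
jest.mock('~/config', () => ({
  logger: {
    info: jest.fn(),
    debug: jest.fn(),
    warn: jest.fn(),
    error: jest.fn(),
  },
}));
```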

* refactor(getAvailablePluginsController): cache plugins, as they are static, avoiding the async addOpenAPISpecs call on every request
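
A sketch of the memoization described, with `availablePlugins` and the controller signature assumed for illustration:

```js
let cachedPlugins = null;

// Plugins are static for the lifetime of the process, so resolve the
// OpenAPI specs once and serve the cached result on later requests.
async function getAvailablePluginsController(req, res) {
  if (cachedPlugins) {
    return res.json(cachedPlugins);
  }
  cachedPlugins = await addOpenAPISpecs(availablePlugins);
  res.json(cachedPlugins);
}
```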

* chore: update docs

* chore: create a separate meiliSync logger; clean up unnecessarily verbose logs
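
A separate logger here likely just means its own winston instance writing to a dedicated file; a sketch under that assumption (file name and retention assumed):

```js
const winston = require('winston');
require('winston-daily-rotate-file');

// Keeps MeiliSearch sync chatter out of the main application log
const meiliLogger = winston.createLogger({
  level: 'info',
  format: winston.format.combine(winston.format.timestamp(), winston.format.json()),
  transports: [
    new winston.transports.DailyRotateFile({
      filename: 'logs/meiliSync-%DATE%.log',
      datePattern: 'YYYY-MM-DD',
      maxFiles: '14d',
    }),
  ],
});

module.exports = meiliLogger;
```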

* chore: spread objects where they are commonly logged to allow string truncation
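
Why spreading helps: a truncating formatter walks enumerable own properties, and spreading a class instance (an API response, for example) copies those properties onto a plain object the formatter can traverse; for instance:

```js
// `response` may be a class instance; { ...response } exposes its
// enumerable fields so long strings inside can be truncated by the format
logger.debug('[OpenAIClient] response', { ...response });
```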

* chore: improve error log formatting
Commit ea1dd59ef4 (parent 49571ac635)
Danny Avila, 2023-12-14 07:49:27 -05:00, committed via GitHub
115 changed files with 1271 additions and 1001 deletions

```diff
@@ -1,6 +1,7 @@
 const { ConversationSummaryBufferMemory, ChatMessageHistory } = require('langchain/memory');
 const { formatLangChainMessages, SUMMARY_PROMPT } = require('../prompts');
 const { predictNewSummary } = require('../chains');
+const { logger } = require('~/config');

 const createSummaryBufferMemory = ({ llm, prompt, messages, ...rest }) => {
   const chatHistory = new ChatMessageHistory(messages);
@@ -22,9 +23,8 @@ const summaryBuffer = async ({
   prompt = SUMMARY_PROMPT,
   signal,
 }) => {
-  if (debug && previous_summary) {
-    console.log('<-----------PREVIOUS SUMMARY----------->\n\n');
-    console.log(previous_summary);
+  if (previous_summary) {
+    logger.debug('[summaryBuffer]', { previous_summary });
   }

   const formattedMessages = formatLangChainMessages(context, formatOptions);
@@ -46,8 +46,7 @@ const summaryBuffer = async ({
   const messages = await chatPromptMemory.chatHistory.getMessages();

   if (debug) {
-    console.log('<-----------SUMMARY BUFFER MESSAGES----------->\n\n');
-    console.log(JSON.stringify(messages));
+    logger.debug('[summaryBuffer]', { summary_buffer_messages: messages.length });
   }

   const predictSummary = await predictNewSummary({
@@ -58,8 +57,7 @@ const summaryBuffer = async ({
   });

   if (debug) {
-    console.log('<-----------SUMMARY----------->\n\n');
-    console.log(JSON.stringify(predictSummary));
+    logger.debug('[summaryBuffer]', { summary: predictSummary });
   }

   return { role: 'system', content: predictSummary };
```