mirror of https://github.com/danny-avila/LibreChat.git
synced 2025-12-16 16:30:15 +01:00
refactor(api): Central Logging 📜 (#1348)
* WIP: initial logging changes
  add several transports in ~/config/winston
  omit messages in logs, truncate long strings
  add short blurb in dotenv for debug logging
  GoogleClient: using logger
  OpenAIClient: using logger, handleOpenAIErrors
  Adding typedef for payload message
  bumped winston and using winston-daily-rotate-file
  moved config for server paths to ~/config dir
  Added `DEBUG_LOGGING=true` to .env.example
* WIP: Refactor logging statements in code
* WIP: Refactor logging statements and import configurations
* WIP: Refactor logging statements and import configurations
* refactor: broadcast Redis initialization message with `info` not `debug`
* refactor: complete Refactor logging statements and import configurations
* chore: delete unused tools
* fix: circular dependencies due to accessing logger
* refactor(handleText): handle booleans and write tests
* refactor: redact sensitive values, better formatting
* chore: improve log formatting, avoid passing strings to 2nd arg
* fix(ci): fix jest tests due to logger changes
* refactor(getAvailablePluginsController): cache plugins as they are static and avoids async addOpenAPISpecs call every time
* chore: update docs
* chore: update docs
* chore: create separate meiliSync logger, clean up logs to avoid being unnecessarily verbose
* chore: spread objects where they are commonly logged to allow string truncation
* chore: improve error log formatting
This commit is contained in:
parent 49571ac635
commit ea1dd59ef4
115 changed files with 1271 additions and 1001 deletions
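The hunks below repeatedly import a shared `logger` from `~/config`, but the logger module itself is not part of the excerpt shown here. As a rough orientation only, here is a minimal, hypothetical sketch of what such a central Winston logger could look like, assuming the `winston` and `winston-daily-rotate-file` packages and the `DEBUG_LOGGING` flag named in the commit message; the file location, log paths, and formats are illustrative, not the actual implementation from this commit.

// Hypothetical sketch only -- not the actual ~/config module from this commit.
// Assumes winston and winston-daily-rotate-file, as named in the commit message.
const path = require('path');
const winston = require('winston');
require('winston-daily-rotate-file');

// DEBUG_LOGGING=true (see the .env.example hunk below) switches the level to 'debug'.
const useDebugLogging = process.env.DEBUG_LOGGING === 'true';

const logger = winston.createLogger({
  level: useDebugLogging ? 'debug' : 'info',
  format: winston.format.combine(
    winston.format.timestamp(),
    winston.format.errors({ stack: true }),
    winston.format.json(),
  ),
  transports: [
    // Console output for local development.
    new winston.transports.Console({ format: winston.format.simple() }),
    // Daily-rotated file for errors, so log files stay bounded.
    new winston.transports.DailyRotateFile({
      filename: path.join(__dirname, 'logs', 'error-%DATE%.log'),
      datePattern: 'YYYY-MM-DD',
      level: 'error',
      maxFiles: '14d',
    }),
  ],
});

module.exports = { logger };

With an export along these lines, client code can replace ad-hoc console.log/console.debug calls with logger.debug(...) and logger.error(...), which is the pattern the diff below applies across the API clients.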
|
|
@ -16,6 +16,8 @@
|
||||||
APP_TITLE=LibreChat
|
APP_TITLE=LibreChat
|
||||||
# CUSTOM_FOOTER="My custom footer"
|
# CUSTOM_FOOTER="My custom footer"
|
||||||
|
|
||||||
|
DEBUG_LOGGING=true
|
||||||
|
|
||||||
HOST=localhost
|
HOST=localhost
|
||||||
PORT=3080
|
PORT=3080
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
require('dotenv').config();
|
require('dotenv').config();
|
||||||
const { KeyvFile } = require('keyv-file');
|
const { KeyvFile } = require('keyv-file');
|
||||||
const { getUserKey, checkUserKeyExpiry } = require('../server/services/UserService');
|
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const askBing = async ({
|
const askBing = async ({
|
||||||
text,
|
text,
|
||||||
|
|
@ -100,7 +101,7 @@ const askBing = async ({
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log('bing options', options);
|
logger.debug('bing options', options);
|
||||||
|
|
||||||
const res = await bingAIClient.sendMessage(text, options);
|
const res = await bingAIClient.sendMessage(text, options);
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,7 @@ const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = requ
|
||||||
const { getResponseSender, EModelEndpoint } = require('librechat-data-provider');
|
const { getResponseSender, EModelEndpoint } = require('librechat-data-provider');
|
||||||
const { getModelMaxTokens } = require('~/utils');
|
const { getModelMaxTokens } = require('~/utils');
|
||||||
const BaseClient = require('./BaseClient');
|
const BaseClient = require('./BaseClient');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const HUMAN_PROMPT = '\n\nHuman:';
|
const HUMAN_PROMPT = '\n\nHuman:';
|
||||||
const AI_PROMPT = '\n\nAssistant:';
|
const AI_PROMPT = '\n\nAssistant:';
|
||||||
|
|
@ -103,9 +104,8 @@ class AnthropicClient extends BaseClient {
|
||||||
messages,
|
messages,
|
||||||
parentMessageId,
|
parentMessageId,
|
||||||
});
|
});
|
||||||
if (this.options.debug) {
|
|
||||||
console.debug('AnthropicClient: orderedMessages', orderedMessages, parentMessageId);
|
logger.debug('[AnthropicClient] orderedMessages', { orderedMessages, parentMessageId });
|
||||||
}
|
|
||||||
|
|
||||||
const formattedMessages = orderedMessages.map((message) => ({
|
const formattedMessages = orderedMessages.map((message) => ({
|
||||||
author: message.isCreatedByUser ? this.userLabel : this.assistantLabel,
|
author: message.isCreatedByUser ? this.userLabel : this.assistantLabel,
|
||||||
|
|
@ -247,7 +247,7 @@ class AnthropicClient extends BaseClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
getCompletion() {
|
getCompletion() {
|
||||||
console.log('AnthropicClient doesn\'t use getCompletion (all handled in sendCompletion)');
|
logger.debug('AnthropicClient doesn\'t use getCompletion (all handled in sendCompletion)');
|
||||||
}
|
}
|
||||||
|
|
||||||
async sendCompletion(payload, { onProgress, abortController }) {
|
async sendCompletion(payload, { onProgress, abortController }) {
|
||||||
|
|
@ -262,12 +262,7 @@ class AnthropicClient extends BaseClient {
|
||||||
modelOptions.stream = true;
|
modelOptions.stream = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
const { debug } = this.options;
|
logger.debug('modelOptions', { modelOptions });
|
||||||
if (debug) {
|
|
||||||
console.debug();
|
|
||||||
console.debug(modelOptions);
|
|
||||||
console.debug();
|
|
||||||
}
|
|
||||||
|
|
||||||
const client = this.getClient();
|
const client = this.getClient();
|
||||||
const metadata = {
|
const metadata = {
|
||||||
|
|
@ -295,32 +290,23 @@ class AnthropicClient extends BaseClient {
|
||||||
top_p,
|
top_p,
|
||||||
top_k,
|
top_k,
|
||||||
};
|
};
|
||||||
if (this.options.debug) {
|
logger.debug('[AnthropicClient]', { requestOptions });
|
||||||
console.log('AnthropicClient: requestOptions');
|
|
||||||
console.dir(requestOptions, { depth: null });
|
|
||||||
}
|
|
||||||
const response = await client.completions.create(requestOptions);
|
const response = await client.completions.create(requestOptions);
|
||||||
|
|
||||||
signal.addEventListener('abort', () => {
|
signal.addEventListener('abort', () => {
|
||||||
if (this.options.debug) {
|
logger.debug('[AnthropicClient] message aborted!');
|
||||||
console.log('AnthropicClient: message aborted!');
|
|
||||||
}
|
|
||||||
response.controller.abort();
|
response.controller.abort();
|
||||||
});
|
});
|
||||||
|
|
||||||
for await (const completion of response) {
|
for await (const completion of response) {
|
||||||
if (this.options.debug) {
|
// Uncomment to debug message stream
|
||||||
// Uncomment to debug message stream
|
// logger.debug(completion);
|
||||||
// console.debug(completion);
|
|
||||||
}
|
|
||||||
text += completion.completion;
|
text += completion.completion;
|
||||||
onProgress(completion.completion);
|
onProgress(completion.completion);
|
||||||
}
|
}
|
||||||
|
|
||||||
signal.removeEventListener('abort', () => {
|
signal.removeEventListener('abort', () => {
|
||||||
if (this.options.debug) {
|
logger.debug('[AnthropicClient] message aborted!');
|
||||||
console.log('AnthropicClient: message aborted!');
|
|
||||||
}
|
|
||||||
response.controller.abort();
|
response.controller.abort();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
@ -336,9 +322,7 @@ class AnthropicClient extends BaseClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
getBuildMessagesOptions() {
|
getBuildMessagesOptions() {
|
||||||
if (this.options.debug) {
|
logger.debug('AnthropicClient doesn\'t use getBuildMessagesOptions');
|
||||||
console.log('AnthropicClient doesn\'t use getBuildMessagesOptions');
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static getTokenizer(encoding, isModelName = false, extendSpecialTokens = {}) {
|
static getTokenizer(encoding, isModelName = false, extendSpecialTokens = {}) {
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,7 @@ const TextStream = require('./TextStream');
|
||||||
const { getConvo, getMessages, saveMessage, updateMessage, saveConvo } = require('~/models');
|
const { getConvo, getMessages, saveMessage, updateMessage, saveConvo } = require('~/models');
|
||||||
const { addSpaceIfNeeded, isEnabled } = require('~/server/utils');
|
const { addSpaceIfNeeded, isEnabled } = require('~/server/utils');
|
||||||
const checkBalance = require('~/models/checkBalance');
|
const checkBalance = require('~/models/checkBalance');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
class BaseClient {
|
class BaseClient {
|
||||||
constructor(apiKey, options = {}) {
|
constructor(apiKey, options = {}) {
|
||||||
|
|
@ -41,15 +42,14 @@ class BaseClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
async getTokenCountForResponse(response) {
|
async getTokenCountForResponse(response) {
|
||||||
if (this.options.debug) {
|
logger.debug('`[BaseClient] recordTokenUsage` not implemented.', response);
|
||||||
console.debug('`recordTokenUsage` not implemented.', response);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async recordTokenUsage({ promptTokens, completionTokens }) {
|
async recordTokenUsage({ promptTokens, completionTokens }) {
|
||||||
if (this.options.debug) {
|
logger.debug('`[BaseClient] recordTokenUsage` not implemented.', {
|
||||||
console.debug('`recordTokenUsage` not implemented.', { promptTokens, completionTokens });
|
promptTokens,
|
||||||
}
|
completionTokens,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
getBuildMessagesOptions() {
|
getBuildMessagesOptions() {
|
||||||
|
|
@ -194,14 +194,14 @@ class BaseClient {
|
||||||
const update = {};
|
const update = {};
|
||||||
|
|
||||||
if (messageId === tokenCountMap.summaryMessage?.messageId) {
|
if (messageId === tokenCountMap.summaryMessage?.messageId) {
|
||||||
this.options.debug && console.debug(`Adding summary props to ${messageId}.`);
|
logger.debug(`[BaseClient] Adding summary props to ${messageId}.`);
|
||||||
|
|
||||||
update.summary = tokenCountMap.summaryMessage.content;
|
update.summary = tokenCountMap.summaryMessage.content;
|
||||||
update.summaryTokenCount = tokenCountMap.summaryMessage.tokenCount;
|
update.summaryTokenCount = tokenCountMap.summaryMessage.tokenCount;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (message.tokenCount && !update.summaryTokenCount) {
|
if (message.tokenCount && !update.summaryTokenCount) {
|
||||||
this.options.debug && console.debug(`Skipping ${messageId}: already had a token count.`);
|
logger.debug(`[BaseClient] Skipping ${messageId}: already had a token count.`);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -278,19 +278,17 @@ class BaseClient {
|
||||||
if (instructions) {
|
if (instructions) {
|
||||||
({ tokenCount, ..._instructions } = instructions);
|
({ tokenCount, ..._instructions } = instructions);
|
||||||
}
|
}
|
||||||
this.options.debug && _instructions && console.debug('instructions tokenCount', tokenCount);
|
_instructions && logger.debug('[BaseClient] instructions tokenCount: ' + tokenCount);
|
||||||
let payload = this.addInstructions(formattedMessages, _instructions);
|
let payload = this.addInstructions(formattedMessages, _instructions);
|
||||||
let orderedWithInstructions = this.addInstructions(orderedMessages, instructions);
|
let orderedWithInstructions = this.addInstructions(orderedMessages, instructions);
|
||||||
|
|
||||||
let { context, remainingContextTokens, messagesToRefine, summaryIndex } =
|
let { context, remainingContextTokens, messagesToRefine, summaryIndex } =
|
||||||
await this.getMessagesWithinTokenLimit(orderedWithInstructions);
|
await this.getMessagesWithinTokenLimit(orderedWithInstructions);
|
||||||
|
|
||||||
this.options.debug &&
|
logger.debug('[BaseClient] Context Count (1/2)', {
|
||||||
console.debug(
|
remainingContextTokens,
|
||||||
'remainingContextTokens, this.maxContextTokens (1/2)',
|
maxContextTokens: this.maxContextTokens,
|
||||||
remainingContextTokens,
|
});
|
||||||
this.maxContextTokens,
|
|
||||||
);
|
|
||||||
|
|
||||||
let summaryMessage;
|
let summaryMessage;
|
||||||
let summaryTokenCount;
|
let summaryTokenCount;
|
||||||
|
|
@ -308,10 +306,9 @@ class BaseClient {
|
||||||
|
|
||||||
if (diff > 0) {
|
if (diff > 0) {
|
||||||
payload = payload.slice(diff);
|
payload = payload.slice(diff);
|
||||||
this.options.debug &&
|
logger.debug(
|
||||||
console.debug(
|
`[BaseClient] Difference between original payload (${length}) and context (${context.length}): ${diff}`,
|
||||||
`Difference between original payload (${length}) and context (${context.length}): ${diff}`,
|
);
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const latestMessage = orderedWithInstructions[orderedWithInstructions.length - 1];
|
const latestMessage = orderedWithInstructions[orderedWithInstructions.length - 1];
|
||||||
|
|
@ -338,12 +335,10 @@ class BaseClient {
|
||||||
// Make sure to only continue summarization logic if the summary message was generated
|
// Make sure to only continue summarization logic if the summary message was generated
|
||||||
shouldSummarize = summaryMessage && shouldSummarize;
|
shouldSummarize = summaryMessage && shouldSummarize;
|
||||||
|
|
||||||
this.options.debug &&
|
logger.debug('[BaseClient] Context Count (2/2)', {
|
||||||
console.debug(
|
remainingContextTokens,
|
||||||
'remainingContextTokens, this.maxContextTokens (2/2)',
|
maxContextTokens: this.maxContextTokens,
|
||||||
remainingContextTokens,
|
});
|
||||||
this.maxContextTokens,
|
|
||||||
);
|
|
||||||
|
|
||||||
let tokenCountMap = orderedWithInstructions.reduce((map, message, index) => {
|
let tokenCountMap = orderedWithInstructions.reduce((map, message, index) => {
|
||||||
const { messageId } = message;
|
const { messageId } = message;
|
||||||
|
|
@ -361,19 +356,13 @@ class BaseClient {
|
||||||
|
|
||||||
const promptTokens = this.maxContextTokens - remainingContextTokens;
|
const promptTokens = this.maxContextTokens - remainingContextTokens;
|
||||||
|
|
||||||
if (this.options.debug) {
|
logger.debug('[BaseClient] Payload size:', payload.length);
|
||||||
console.debug('<-------------------------PAYLOAD/TOKEN COUNT MAP------------------------->');
|
logger.debug('[BaseClient] tokenCountMap:', tokenCountMap);
|
||||||
console.debug('Payload:', payload);
|
logger.debug('[BaseClient]', {
|
||||||
console.debug('Token Count Map:', tokenCountMap);
|
promptTokens,
|
||||||
console.debug(
|
remainingContextTokens,
|
||||||
'Prompt Tokens',
|
maxContextTokens: this.maxContextTokens,
|
||||||
promptTokens,
|
});
|
||||||
'remainingContextTokens',
|
|
||||||
remainingContextTokens,
|
|
||||||
'this.maxContextTokens',
|
|
||||||
this.maxContextTokens,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return { payload, tokenCountMap, promptTokens, messages: orderedWithInstructions };
|
return { payload, tokenCountMap, promptTokens, messages: orderedWithInstructions };
|
||||||
}
|
}
|
||||||
|
|
@ -421,11 +410,11 @@ class BaseClient {
|
||||||
);
|
);
|
||||||
|
|
||||||
if (tokenCountMap) {
|
if (tokenCountMap) {
|
||||||
console.dir(tokenCountMap, { depth: null });
|
logger.debug('[BaseClient] tokenCountMap', tokenCountMap);
|
||||||
if (tokenCountMap[userMessage.messageId]) {
|
if (tokenCountMap[userMessage.messageId]) {
|
||||||
userMessage.tokenCount = tokenCountMap[userMessage.messageId];
|
userMessage.tokenCount = tokenCountMap[userMessage.messageId];
|
||||||
console.log('userMessage.tokenCount', userMessage.tokenCount);
|
logger.debug('[BaseClient] userMessage.tokenCount', userMessage.tokenCount);
|
||||||
console.log('userMessage', userMessage);
|
logger.debug('[BaseClient] userMessage', userMessage);
|
||||||
}
|
}
|
||||||
|
|
||||||
this.handleTokenCountMap(tokenCountMap);
|
this.handleTokenCountMap(tokenCountMap);
|
||||||
|
|
@ -443,7 +432,6 @@ class BaseClient {
|
||||||
user: this.user,
|
user: this.user,
|
||||||
tokenType: 'prompt',
|
tokenType: 'prompt',
|
||||||
amount: promptTokens,
|
amount: promptTokens,
|
||||||
debug: this.options.debug,
|
|
||||||
model: this.modelOptions.model,
|
model: this.modelOptions.model,
|
||||||
endpoint: this.options.endpoint,
|
endpoint: this.options.endpoint,
|
||||||
},
|
},
|
||||||
|
|
@ -483,9 +471,7 @@ class BaseClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
async loadHistory(conversationId, parentMessageId = null) {
|
async loadHistory(conversationId, parentMessageId = null) {
|
||||||
if (this.options.debug) {
|
logger.debug('[BaseClient] Loading history:', { conversationId, parentMessageId });
|
||||||
console.debug('Loading history for conversation', conversationId, parentMessageId);
|
|
||||||
}
|
|
||||||
|
|
||||||
const messages = (await getMessages({ conversationId })) ?? [];
|
const messages = (await getMessages({ conversationId })) ?? [];
|
||||||
|
|
||||||
|
|
@ -516,9 +502,14 @@ class BaseClient {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.options.debug && this.previous_summary) {
|
if (this.previous_summary) {
|
||||||
const { messageId, summary, tokenCount, summaryTokenCount } = this.previous_summary;
|
const { messageId, summary, tokenCount, summaryTokenCount } = this.previous_summary;
|
||||||
console.debug('Previous summary:', { messageId, summary, tokenCount, summaryTokenCount });
|
logger.debug('[BaseClient] Previous summary:', {
|
||||||
|
messageId,
|
||||||
|
summary,
|
||||||
|
tokenCount,
|
||||||
|
summaryTokenCount,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
return orderedMessages;
|
return orderedMessages;
|
||||||
|
|
|
||||||
|
|
@ -8,6 +8,7 @@ const { getResponseSender, EModelEndpoint, endpointSettings } = require('librech
|
||||||
const { getModelMaxTokens } = require('~/utils');
|
const { getModelMaxTokens } = require('~/utils');
|
||||||
const { formatMessage } = require('./prompts');
|
const { formatMessage } = require('./prompts');
|
||||||
const BaseClient = require('./BaseClient');
|
const BaseClient = require('./BaseClient');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const loc = 'us-central1';
|
const loc = 'us-central1';
|
||||||
const publisher = 'google';
|
const publisher = 'google';
|
||||||
|
|
@ -42,8 +43,7 @@ class GoogleClient extends BaseClient {
|
||||||
|
|
||||||
jwtClient.authorize((err) => {
|
jwtClient.authorize((err) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
console.error('Error: jwtClient failed to authorize');
|
logger.error('jwtClient failed to authorize', err);
|
||||||
console.error(err.message);
|
|
||||||
throw err;
|
throw err;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
@ -58,11 +58,9 @@ class GoogleClient extends BaseClient {
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
jwtClient.authorize((err, tokens) => {
|
jwtClient.authorize((err, tokens) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
console.error('Error: jwtClient failed to authorize');
|
logger.error('jwtClient failed to authorize', err);
|
||||||
console.error(err.message);
|
|
||||||
reject(err);
|
reject(err);
|
||||||
} else {
|
} else {
|
||||||
console.log('Access Token:', tokens.access_token);
|
|
||||||
resolve(tokens.access_token);
|
resolve(tokens.access_token);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
@ -213,8 +211,7 @@ class GoogleClient extends BaseClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.options.debug) {
|
if (this.options.debug) {
|
||||||
console.debug('GoogleClient buildMessages');
|
logger.debug('GoogleClient buildMessages', payload);
|
||||||
console.dir(payload, { depth: null });
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return { prompt: payload };
|
return { prompt: payload };
|
||||||
|
|
@ -226,7 +223,10 @@ class GoogleClient extends BaseClient {
|
||||||
parentMessageId,
|
parentMessageId,
|
||||||
});
|
});
|
||||||
if (this.options.debug) {
|
if (this.options.debug) {
|
||||||
console.debug('GoogleClient: orderedMessages', orderedMessages, parentMessageId);
|
logger.debug('GoogleClient: orderedMessages, parentMessageId', {
|
||||||
|
orderedMessages,
|
||||||
|
parentMessageId,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const formattedMessages = orderedMessages.map((message) => ({
|
const formattedMessages = orderedMessages.map((message) => ({
|
||||||
|
|
@ -377,10 +377,7 @@ class GoogleClient extends BaseClient {
|
||||||
const { debug } = this.options;
|
const { debug } = this.options;
|
||||||
const url = this.completionsUrl;
|
const url = this.completionsUrl;
|
||||||
if (debug) {
|
if (debug) {
|
||||||
console.debug();
|
logger.debug('GoogleClient _getCompletion', { url, payload });
|
||||||
console.debug(url);
|
|
||||||
console.debug(this.modelOptions);
|
|
||||||
console.debug();
|
|
||||||
}
|
}
|
||||||
const opts = {
|
const opts = {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
|
|
@ -397,7 +394,7 @@ class GoogleClient extends BaseClient {
|
||||||
|
|
||||||
const client = await this.getClient();
|
const client = await this.getClient();
|
||||||
const res = await client.request({ url, method: 'POST', data: payload });
|
const res = await client.request({ url, method: 'POST', data: payload });
|
||||||
console.dir(res.data, { depth: null });
|
logger.debug('GoogleClient _getCompletion', { res });
|
||||||
return res.data;
|
return res.data;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -476,7 +473,7 @@ class GoogleClient extends BaseClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
getBuildMessagesOptions() {
|
getBuildMessagesOptions() {
|
||||||
// console.log('GoogleClient doesn\'t use getBuildMessagesOptions');
|
// logger.debug('GoogleClient doesn\'t use getBuildMessagesOptions');
|
||||||
}
|
}
|
||||||
|
|
||||||
async sendCompletion(payload, opts = {}) {
|
async sendCompletion(payload, opts = {}) {
|
||||||
|
|
@ -484,13 +481,10 @@ class GoogleClient extends BaseClient {
|
||||||
try {
|
try {
|
||||||
reply = await this.getCompletion(payload, opts);
|
reply = await this.getCompletion(payload, opts);
|
||||||
if (this.options.debug) {
|
if (this.options.debug) {
|
||||||
console.debug('result');
|
logger.debug('GoogleClient sendCompletion', { reply });
|
||||||
console.debug(reply);
|
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error('Error: failed to send completion to Google');
|
logger.error('failed to send completion to Google', err);
|
||||||
console.error(err);
|
|
||||||
console.error(err.message);
|
|
||||||
}
|
}
|
||||||
return reply.trim();
|
return reply.trim();
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -14,6 +14,7 @@ const { summaryBuffer } = require('./memory');
|
||||||
const { runTitleChain } = require('./chains');
|
const { runTitleChain } = require('./chains');
|
||||||
const { tokenSplit } = require('./document');
|
const { tokenSplit } = require('./document');
|
||||||
const BaseClient = require('./BaseClient');
|
const BaseClient = require('./BaseClient');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
// Cache to store Tiktoken instances
|
// Cache to store Tiktoken instances
|
||||||
const tokenizersCache = {};
|
const tokenizersCache = {};
|
||||||
|
|
@ -123,7 +124,7 @@ class OpenAIClient extends BaseClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.options.debug) {
|
if (this.options.debug) {
|
||||||
console.debug('maxContextTokens', this.maxContextTokens);
|
logger.debug('[OpenAIClient] maxContextTokens', this.maxContextTokens);
|
||||||
}
|
}
|
||||||
|
|
||||||
this.maxResponseTokens = this.modelOptions.max_tokens || 1024;
|
this.maxResponseTokens = this.modelOptions.max_tokens || 1024;
|
||||||
|
|
@ -175,7 +176,7 @@ class OpenAIClient extends BaseClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.azureEndpoint && this.options.debug) {
|
if (this.azureEndpoint && this.options.debug) {
|
||||||
console.debug('Using Azure endpoint');
|
logger.debug('Using Azure endpoint');
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.useOpenRouter) {
|
if (this.useOpenRouter) {
|
||||||
|
|
@ -254,8 +255,7 @@ class OpenAIClient extends BaseClient {
|
||||||
// Reset count
|
// Reset count
|
||||||
tokenizerCallsCount = 1;
|
tokenizerCallsCount = 1;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.log('Free and reset encoders error');
|
logger.error('[OpenAIClient] Free and reset encoders error', error);
|
||||||
console.error(error);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -263,7 +263,7 @@ class OpenAIClient extends BaseClient {
|
||||||
resetTokenizersIfNecessary() {
|
resetTokenizersIfNecessary() {
|
||||||
if (tokenizerCallsCount >= 25) {
|
if (tokenizerCallsCount >= 25) {
|
||||||
if (this.options.debug) {
|
if (this.options.debug) {
|
||||||
console.debug('freeAndResetAllEncoders: reached 25 encodings, resetting...');
|
logger.debug('[OpenAIClient] freeAndResetAllEncoders: reached 25 encodings, resetting...');
|
||||||
}
|
}
|
||||||
this.constructor.freeAndResetAllEncoders();
|
this.constructor.freeAndResetAllEncoders();
|
||||||
}
|
}
|
||||||
|
|
@ -403,11 +403,6 @@ class OpenAIClient extends BaseClient {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.options.debug) {
|
|
||||||
// console.debug('progressMessage');
|
|
||||||
// console.dir(progressMessage, { depth: null });
|
|
||||||
}
|
|
||||||
|
|
||||||
if (progressMessage.choices) {
|
if (progressMessage.choices) {
|
||||||
streamResult = progressMessage;
|
streamResult = progressMessage;
|
||||||
}
|
}
|
||||||
|
|
@ -427,9 +422,7 @@ class OpenAIClient extends BaseClient {
|
||||||
if (!token) {
|
if (!token) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (this.options.debug) {
|
|
||||||
// console.debug(token);
|
|
||||||
}
|
|
||||||
if (token === this.endToken) {
|
if (token === this.endToken) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
@ -451,9 +444,9 @@ class OpenAIClient extends BaseClient {
|
||||||
null,
|
null,
|
||||||
opts.abortController || new AbortController(),
|
opts.abortController || new AbortController(),
|
||||||
);
|
);
|
||||||
if (this.options.debug) {
|
|
||||||
console.debug(JSON.stringify(result));
|
logger.debug('[OpenAIClient] sendCompletion: result', result);
|
||||||
}
|
|
||||||
if (this.isChatCompletion) {
|
if (this.isChatCompletion) {
|
||||||
reply = result.choices[0].message.content;
|
reply = result.choices[0].message.content;
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -557,11 +550,13 @@ class OpenAIClient extends BaseClient {
|
||||||
title = await runTitleChain({ llm, text, convo, signal: this.abortController.signal });
|
title = await runTitleChain({ llm, text, convo, signal: this.abortController.signal });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
if (e?.message?.toLowerCase()?.includes('abort')) {
|
if (e?.message?.toLowerCase()?.includes('abort')) {
|
||||||
this.options.debug && console.debug('Aborted title generation');
|
logger.debug('[OpenAIClient] Aborted title generation');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
console.log('There was an issue generating title with LangChain, trying the old method...');
|
logger.error(
|
||||||
this.options.debug && console.error(e.message, e);
|
'[OpenAIClient] There was an issue generating title with LangChain, trying the old method...',
|
||||||
|
e,
|
||||||
|
);
|
||||||
modelOptions.model = OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
|
modelOptions.model = OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
|
||||||
if (this.azure) {
|
if (this.azure) {
|
||||||
modelOptions.model = process.env.AZURE_OPENAI_DEFAULT_MODEL ?? modelOptions.model;
|
modelOptions.model = process.env.AZURE_OPENAI_DEFAULT_MODEL ?? modelOptions.model;
|
||||||
|
|
@ -582,17 +577,16 @@ ${convo}
|
||||||
try {
|
try {
|
||||||
title = (await this.sendPayload(instructionsPayload, { modelOptions })).replaceAll('"', '');
|
title = (await this.sendPayload(instructionsPayload, { modelOptions })).replaceAll('"', '');
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.error(e);
|
logger.error('[OpenAIClient] There was another issue generating the title', e);
|
||||||
console.log('There was another issue generating the title, see error above.');
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log('CONVERSATION TITLE', title);
|
logger.debug('[OpenAIClient] Convo Title: ' + title);
|
||||||
return title;
|
return title;
|
||||||
}
|
}
|
||||||
|
|
||||||
async summarizeMessages({ messagesToRefine, remainingContextTokens }) {
|
async summarizeMessages({ messagesToRefine, remainingContextTokens }) {
|
||||||
this.options.debug && console.debug('Summarizing messages...');
|
logger.debug('[OpenAIClient] Summarizing messages...');
|
||||||
let context = messagesToRefine;
|
let context = messagesToRefine;
|
||||||
let prompt;
|
let prompt;
|
||||||
|
|
||||||
|
|
@ -615,8 +609,9 @@ ${convo}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (context.length === 0) {
|
if (context.length === 0) {
|
||||||
this.options.debug &&
|
logger.debug(
|
||||||
console.debug('Summary context is empty, using latest message within token limit');
|
'[OpenAIClient] Summary context is empty, using latest message within token limit',
|
||||||
|
);
|
||||||
|
|
||||||
promptBuffer = 32;
|
promptBuffer = 32;
|
||||||
const { text, ...latestMessage } = messagesToRefine[messagesToRefine.length - 1];
|
const { text, ...latestMessage } = messagesToRefine[messagesToRefine.length - 1];
|
||||||
|
|
@ -643,7 +638,7 @@ ${convo}
|
||||||
// by recreating the summary prompt (single message) to avoid LangChain handling
|
// by recreating the summary prompt (single message) to avoid LangChain handling
|
||||||
|
|
||||||
const initialPromptTokens = this.maxContextTokens - remainingContextTokens;
|
const initialPromptTokens = this.maxContextTokens - remainingContextTokens;
|
||||||
this.options.debug && console.debug(`initialPromptTokens: ${initialPromptTokens}`);
|
logger.debug('[OpenAIClient] initialPromptTokens', initialPromptTokens);
|
||||||
|
|
||||||
const llm = this.initializeLLM({
|
const llm = this.initializeLLM({
|
||||||
model: OPENAI_SUMMARY_MODEL,
|
model: OPENAI_SUMMARY_MODEL,
|
||||||
|
|
@ -669,9 +664,9 @@ ${convo}
|
||||||
const summaryTokenCount = this.getTokenCountForMessage(summaryMessage);
|
const summaryTokenCount = this.getTokenCountForMessage(summaryMessage);
|
||||||
|
|
||||||
if (this.options.debug) {
|
if (this.options.debug) {
|
||||||
console.debug('summaryMessage:', summaryMessage);
|
logger.debug('[OpenAIClient] summaryTokenCount', summaryTokenCount);
|
||||||
console.debug(
|
logger.debug(
|
||||||
`remainingContextTokens: ${remainingContextTokens}, after refining: ${
|
`[OpenAIClient] Summarization complete: remainingContextTokens: ${remainingContextTokens}, after refining: ${
|
||||||
remainingContextTokens - summaryTokenCount
|
remainingContextTokens - summaryTokenCount
|
||||||
}`,
|
}`,
|
||||||
);
|
);
|
||||||
|
|
@ -680,7 +675,7 @@ ${convo}
|
||||||
return { summaryMessage, summaryTokenCount };
|
return { summaryMessage, summaryTokenCount };
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
if (e?.message?.toLowerCase()?.includes('abort')) {
|
if (e?.message?.toLowerCase()?.includes('abort')) {
|
||||||
this.options.debug && console.debug('Aborted summarization');
|
logger.debug('[OpenAIClient] Aborted summarization');
|
||||||
const { run, runId } = this.runManager.getRunByConversationId(this.conversationId);
|
const { run, runId } = this.runManager.getRunByConversationId(this.conversationId);
|
||||||
if (run && run.error) {
|
if (run && run.error) {
|
||||||
const { error } = run;
|
const { error } = run;
|
||||||
|
|
@ -688,17 +683,13 @@ ${convo}
|
||||||
throw new Error(error);
|
throw new Error(error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
console.error('Error summarizing messages');
|
logger.error('[OpenAIClient] Error summarizing messages', e);
|
||||||
this.options.debug && console.error(e);
|
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async recordTokenUsage({ promptTokens, completionTokens }) {
|
async recordTokenUsage({ promptTokens, completionTokens }) {
|
||||||
if (this.options.debug) {
|
logger.debug('[OpenAIClient]', { promptTokens, completionTokens });
|
||||||
console.debug('promptTokens', promptTokens);
|
|
||||||
console.debug('completionTokens', completionTokens);
|
|
||||||
}
|
|
||||||
await spendTokens(
|
await spendTokens(
|
||||||
{
|
{
|
||||||
user: this.user,
|
user: this.user,
|
||||||
|
|
@ -736,14 +727,19 @@ ${convo}
|
||||||
modelOptions.prompt = payload;
|
modelOptions.prompt = payload;
|
||||||
}
|
}
|
||||||
|
|
||||||
const { debug } = this.options;
|
const baseURL = extractBaseURL(this.completionsUrl);
|
||||||
const url = extractBaseURL(this.completionsUrl);
|
// let { messages: _msgsToLog, ...modelOptionsToLog } = modelOptions;
|
||||||
if (debug) {
|
// if (modelOptionsToLog.messages) {
|
||||||
console.debug('baseURL', url);
|
// _msgsToLog = modelOptionsToLog.messages.map((msg) => {
|
||||||
console.debug('modelOptions', modelOptions);
|
// let { content, ...rest } = msg;
|
||||||
}
|
|
||||||
|
// if (content)
|
||||||
|
// return { ...rest, content: truncateText(content) };
|
||||||
|
// });
|
||||||
|
// }
|
||||||
|
logger.debug('[OpenAIClient] chatCompletion', { baseURL, modelOptions });
|
||||||
const opts = {
|
const opts = {
|
||||||
baseURL: url,
|
baseURL,
|
||||||
};
|
};
|
||||||
|
|
||||||
if (this.useOpenRouter) {
|
if (this.useOpenRouter) {
|
||||||
|
|
@ -820,7 +816,7 @@ ${convo}
|
||||||
|
|
||||||
if (!chatCompletion && UnexpectedRoleError) {
|
if (!chatCompletion && UnexpectedRoleError) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
'OpenAIError: Invalid final message: OpenAI expects final message to include role=assistant',
|
'OpenAI error: Invalid final message: OpenAI expects final message to include role=assistant',
|
||||||
);
|
);
|
||||||
} else if (!chatCompletion && error) {
|
} else if (!chatCompletion && error) {
|
||||||
throw new Error(error);
|
throw new Error(error);
|
||||||
|
|
@ -843,27 +839,23 @@ ${convo}
|
||||||
}
|
}
|
||||||
if (
|
if (
|
||||||
err?.message?.includes(
|
err?.message?.includes(
|
||||||
'OpenAIError: Invalid final message: OpenAI expects final message to include role=assistant',
|
'OpenAI error: Invalid final message: OpenAI expects final message to include role=assistant',
|
||||||
) ||
|
) ||
|
||||||
err?.message?.includes('The server had an error processing your request') ||
|
err?.message?.includes('The server had an error processing your request') ||
|
||||||
err?.message?.includes('missing finish_reason') ||
|
err?.message?.includes('missing finish_reason') ||
|
||||||
(err instanceof OpenAI.OpenAIError && err?.message?.includes('missing finish_reason'))
|
(err instanceof OpenAI.OpenAIError && err?.message?.includes('missing finish_reason'))
|
||||||
) {
|
) {
|
||||||
console.error(err);
|
logger.error('[OpenAIClient] Known OpenAI error:', err);
|
||||||
await abortController.abortCompletion();
|
await abortController.abortCompletion();
|
||||||
return intermediateReply;
|
return intermediateReply;
|
||||||
} else if (err instanceof OpenAI.APIError) {
|
} else if (err instanceof OpenAI.APIError) {
|
||||||
console.log(err.name);
|
|
||||||
console.log(err.status);
|
|
||||||
console.log(err.headers);
|
|
||||||
if (intermediateReply) {
|
if (intermediateReply) {
|
||||||
return intermediateReply;
|
return intermediateReply;
|
||||||
} else {
|
} else {
|
||||||
throw err;
|
throw err;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
console.warn('[OpenAIClient.chatCompletion] Unhandled error type');
|
logger.error('[OpenAIClient.chatCompletion] Unhandled error type', err);
|
||||||
console.error(err);
|
|
||||||
throw err;
|
throw err;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -10,6 +10,7 @@ const { SelfReflectionTool } = require('./tools');
|
||||||
const { isEnabled } = require('~/server/utils');
|
const { isEnabled } = require('~/server/utils');
|
||||||
const { extractBaseURL } = require('~/utils');
|
const { extractBaseURL } = require('~/utils');
|
||||||
const { loadTools } = require('./tools/util');
|
const { loadTools } = require('./tools/util');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
class PluginsClient extends OpenAIClient {
|
class PluginsClient extends OpenAIClient {
|
||||||
constructor(apiKey, options = {}) {
|
constructor(apiKey, options = {}) {
|
||||||
|
|
@ -85,17 +86,15 @@ class PluginsClient extends OpenAIClient {
|
||||||
initialMessageCount: this.currentMessages.length + 1,
|
initialMessageCount: this.currentMessages.length + 1,
|
||||||
});
|
});
|
||||||
|
|
||||||
if (this.options.debug) {
|
logger.debug(
|
||||||
console.debug(
|
`[PluginsClient] Agent Model: ${model.modelName} | Temp: ${model.temperature} | Functions: ${this.functionsAgent}`,
|
||||||
`<-----Agent Model: ${model.modelName} | Temp: ${model.temperature} | Functions: ${this.functionsAgent}----->`,
|
);
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Map Messages to Langchain format
|
// Map Messages to Langchain format
|
||||||
const pastMessages = formatLangChainMessages(this.currentMessages.slice(0, -1), {
|
const pastMessages = formatLangChainMessages(this.currentMessages.slice(0, -1), {
|
||||||
userName: this.options?.name,
|
userName: this.options?.name,
|
||||||
});
|
});
|
||||||
this.options.debug && console.debug('pastMessages: ', pastMessages);
|
logger.debug('[PluginsClient] pastMessages: ' + pastMessages.length);
|
||||||
|
|
||||||
// TODO: use readOnly memory, TokenBufferMemory? (both unavailable in LangChainJS)
|
// TODO: use readOnly memory, TokenBufferMemory? (both unavailable in LangChainJS)
|
||||||
const memory = new BufferMemory({
|
const memory = new BufferMemory({
|
||||||
|
|
@ -124,19 +123,16 @@ class PluginsClient extends OpenAIClient {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.options.debug) {
|
logger.debug('[PluginsClient] Requested Tools', this.options.tools);
|
||||||
console.debug('Requested Tools');
|
logger.debug(
|
||||||
console.debug(this.options.tools);
|
'[PluginsClient] Loaded Tools',
|
||||||
console.debug('Loaded Tools');
|
this.tools.map((tool) => tool.name),
|
||||||
console.debug(this.tools.map((tool) => tool.name));
|
);
|
||||||
}
|
|
||||||
|
|
||||||
const handleAction = (action, runId, callback = null) => {
|
const handleAction = (action, runId, callback = null) => {
|
||||||
this.saveLatestAction(action);
|
this.saveLatestAction(action);
|
||||||
|
|
||||||
if (this.options.debug) {
|
logger.debug('[PluginsClient] Latest Agent Action ', this.actions[this.actions.length - 1]);
|
||||||
console.debug('Latest Agent Action ', this.actions[this.actions.length - 1]);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (typeof callback === 'function') {
|
if (typeof callback === 'function') {
|
||||||
callback(action, runId);
|
callback(action, runId);
|
||||||
|
|
@ -165,9 +161,7 @@ class PluginsClient extends OpenAIClient {
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|
||||||
if (this.options.debug) {
|
logger.debug('[PluginsClient] Loaded agent.');
|
||||||
console.debug('Loaded agent.');
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async executorCall(message, { signal, stream, onToolStart, onToolEnd }) {
|
async executorCall(message, { signal, stream, onToolStart, onToolEnd }) {
|
||||||
|
|
@ -183,12 +177,10 @@ class PluginsClient extends OpenAIClient {
|
||||||
});
|
});
|
||||||
const input = attempts > 1 ? errorInput : message;
|
const input = attempts > 1 ? errorInput : message;
|
||||||
|
|
||||||
if (this.options.debug) {
|
logger.debug(`[PluginsClient] Attempt ${attempts} of ${maxAttempts}`);
|
||||||
console.debug(`Attempt ${attempts} of ${maxAttempts}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this.options.debug && errorMessage.length > 0) {
|
if (errorMessage.length > 0) {
|
||||||
console.debug('Caught error, input:', input);
|
logger.debug('[PluginsClient] Caught error, input:', input);
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
|
@ -211,10 +203,10 @@ class PluginsClient extends OpenAIClient {
|
||||||
]);
|
]);
|
||||||
break; // Exit the loop if the function call is successful
|
break; // Exit the loop if the function call is successful
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error(err);
|
logger.error('[PluginsClient] executorCall error:', err);
|
||||||
if (attempts === maxAttempts) {
|
if (attempts === maxAttempts) {
|
||||||
const { run } = this.runManager.getRunByConversationId(this.conversationId);
|
const { run } = this.runManager.getRunByConversationId(this.conversationId);
|
||||||
const defaultOutput = `Encountered an error while attempting to respond. Error: ${err.message}`;
|
const defaultOutput = `Encountered an error while attempting to respond: ${err.message}`;
|
||||||
this.result.output = run && run.error ? run.error : defaultOutput;
|
this.result.output = run && run.error ? run.error : defaultOutput;
|
||||||
this.result.errorMessage = run && run.error ? run.error : err.message;
|
this.result.errorMessage = run && run.error ? run.error : err.message;
|
||||||
this.result.intermediateSteps = this.actions;
|
this.result.intermediateSteps = this.actions;
|
||||||
|
|
@ -226,8 +218,11 @@ class PluginsClient extends OpenAIClient {
|
||||||
|
|
||||||
async handleResponseMessage(responseMessage, saveOptions, user) {
|
async handleResponseMessage(responseMessage, saveOptions, user) {
|
||||||
const { output, errorMessage, ...result } = this.result;
|
const { output, errorMessage, ...result } = this.result;
|
||||||
this.options.debug &&
|
logger.debug('[PluginsClient][handleResponseMessage] Output:', {
|
||||||
console.debug('[handleResponseMessage] Output:', { output, errorMessage, ...result });
|
output,
|
||||||
|
errorMessage,
|
||||||
|
...result,
|
||||||
|
});
|
||||||
const { error } = responseMessage;
|
const { error } = responseMessage;
|
||||||
if (!error) {
|
if (!error) {
|
||||||
responseMessage.tokenCount = this.getTokenCountForResponse(responseMessage);
|
responseMessage.tokenCount = this.getTokenCountForResponse(responseMessage);
|
||||||
|
|
@ -251,7 +246,7 @@ class PluginsClient extends OpenAIClient {
|
||||||
this.setOptions(opts);
|
this.setOptions(opts);
|
||||||
return super.sendMessage(message, opts);
|
return super.sendMessage(message, opts);
|
||||||
}
|
}
|
||||||
this.options.debug && console.log('Plugins sendMessage', message, opts);
|
logger.debug('[PluginsClient] sendMessage', { message, opts });
|
||||||
const {
|
const {
|
||||||
user,
|
user,
|
||||||
isEdited,
|
isEdited,
|
||||||
|
|
@ -281,10 +276,10 @@ class PluginsClient extends OpenAIClient {
|
||||||
);
|
);
|
||||||
|
|
||||||
if (tokenCountMap) {
|
if (tokenCountMap) {
|
||||||
console.dir(tokenCountMap, { depth: null });
|
logger.debug('[PluginsClient] tokenCountMap', { tokenCountMap });
|
||||||
if (tokenCountMap[userMessage.messageId]) {
|
if (tokenCountMap[userMessage.messageId]) {
|
||||||
userMessage.tokenCount = tokenCountMap[userMessage.messageId];
|
userMessage.tokenCount = tokenCountMap[userMessage.messageId];
|
||||||
console.log('userMessage.tokenCount', userMessage.tokenCount);
|
logger.debug('[PluginsClient] userMessage.tokenCount', userMessage.tokenCount);
|
||||||
}
|
}
|
||||||
this.handleTokenCountMap(tokenCountMap);
|
this.handleTokenCountMap(tokenCountMap);
|
||||||
}
|
}
|
||||||
|
|
@ -370,10 +365,7 @@ class PluginsClient extends OpenAIClient {
|
||||||
return await this.handleResponseMessage(responseMessage, saveOptions, user);
|
return await this.handleResponseMessage(responseMessage, saveOptions, user);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.options.debug) {
|
logger.debug('[PluginsClient] Completion phase: this.result', this.result);
|
||||||
console.debug('Plugins completion phase: this.result');
|
|
||||||
console.debug(this.result);
|
|
||||||
}
|
|
||||||
|
|
||||||
const promptPrefix = buildPromptPrefix({
|
const promptPrefix = buildPromptPrefix({
|
||||||
result: this.result,
|
result: this.result,
|
||||||
|
|
@ -381,28 +373,20 @@ class PluginsClient extends OpenAIClient {
|
||||||
functionsAgent: this.functionsAgent,
|
functionsAgent: this.functionsAgent,
|
||||||
});
|
});
|
||||||
|
|
||||||
if (this.options.debug) {
|
logger.debug('[PluginsClient]', { promptPrefix });
|
||||||
console.debug('Plugins: promptPrefix');
|
|
||||||
console.debug(promptPrefix);
|
|
||||||
}
|
|
||||||
|
|
||||||
payload = await this.buildCompletionPrompt({
|
payload = await this.buildCompletionPrompt({
|
||||||
messages: this.currentMessages,
|
messages: this.currentMessages,
|
||||||
promptPrefix,
|
promptPrefix,
|
||||||
});
|
});
|
||||||
|
|
||||||
if (this.options.debug) {
|
logger.debug('[PluginsClient] buildCompletionPrompt Payload', payload);
|
||||||
console.debug('buildCompletionPrompt Payload');
|
|
||||||
console.debug(payload);
|
|
||||||
}
|
|
||||||
responseMessage.text = await this.sendCompletion(payload, opts);
|
responseMessage.text = await this.sendCompletion(payload, opts);
|
||||||
return await this.handleResponseMessage(responseMessage, saveOptions, user);
|
return await this.handleResponseMessage(responseMessage, saveOptions, user);
|
||||||
}
|
}
|
||||||
|
|
||||||
async buildCompletionPrompt({ messages, promptPrefix: _promptPrefix }) {
|
async buildCompletionPrompt({ messages, promptPrefix: _promptPrefix }) {
|
||||||
if (this.options.debug) {
|
logger.debug('[PluginsClient] buildCompletionPrompt messages', messages);
|
||||||
console.debug('buildCompletionPrompt messages', messages);
|
|
||||||
}
|
|
||||||
|
|
||||||
const orderedMessages = messages;
|
const orderedMessages = messages;
|
||||||
let promptPrefix = _promptPrefix.trim();
|
let promptPrefix = _promptPrefix.trim();
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,5 @@
|
||||||
const { Readable } = require('stream');
|
const { Readable } = require('stream');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
class TextStream extends Readable {
|
class TextStream extends Readable {
|
||||||
constructor(text, options = {}) {
|
constructor(text, options = {}) {
|
||||||
|
|
@ -38,7 +39,7 @@ class TextStream extends Readable {
|
||||||
});
|
});
|
||||||
|
|
||||||
this.on('end', () => {
|
this.on('end', () => {
|
||||||
// console.log('Stream ended');
|
// logger.debug('[processTextStream] Stream ended');
|
||||||
resolve();
|
resolve();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
@ -50,7 +51,7 @@ class TextStream extends Readable {
|
||||||
try {
|
try {
|
||||||
await streamPromise;
|
await streamPromise;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error('Error processing text stream:', err);
|
logger.error('[processTextStream] Error in text stream:', err);
|
||||||
// Handle the error appropriately, e.g., return an error message or throw an error
|
// Handle the error appropriately, e.g., return an error message or throw an error
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,5 @@
|
||||||
const { ZeroShotAgentOutputParser } = require('langchain/agents');
|
const { ZeroShotAgentOutputParser } = require('langchain/agents');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
class CustomOutputParser extends ZeroShotAgentOutputParser {
|
class CustomOutputParser extends ZeroShotAgentOutputParser {
|
||||||
constructor(fields) {
|
constructor(fields) {
|
||||||
|
|
@ -64,9 +65,9 @@ class CustomOutputParser extends ZeroShotAgentOutputParser {
|
||||||
const match = this.actionValues.exec(text); // old v2
|
const match = this.actionValues.exec(text); // old v2
|
||||||
|
|
||||||
if (!match) {
|
if (!match) {
|
||||||
console.log(
|
logger.debug(
|
||||||
'\n\n<----------------------HIT NO MATCH PARSING ERROR---------------------->\n\n',
|
'\n\n<----------------------[CustomOutputParser] HIT NO MATCH PARSING ERROR---------------------->\n\n' +
|
||||||
match,
|
match,
|
||||||
);
|
);
|
||||||
const thoughts = text.replace(/[tT]hought:/, '').split('\n');
|
const thoughts = text.replace(/[tT]hought:/, '').split('\n');
|
||||||
// return {
|
// return {
|
||||||
|
|
@ -84,9 +85,9 @@ class CustomOutputParser extends ZeroShotAgentOutputParser {
|
||||||
let selectedTool = match?.[1].trim().toLowerCase();
|
let selectedTool = match?.[1].trim().toLowerCase();
|
||||||
|
|
||||||
if (match && selectedTool === 'n/a') {
|
if (match && selectedTool === 'n/a') {
|
||||||
console.log(
|
logger.debug(
|
||||||
'\n\n<----------------------HIT N/A PARSING ERROR---------------------->\n\n',
|
'\n\n<----------------------[CustomOutputParser] HIT N/A PARSING ERROR---------------------->\n\n' +
|
||||||
match,
|
match,
|
||||||
);
|
);
|
||||||
return {
|
return {
|
||||||
tool: 'self-reflection',
|
tool: 'self-reflection',
|
||||||
|
|
@ -97,25 +98,25 @@ class CustomOutputParser extends ZeroShotAgentOutputParser {
|
||||||
|
|
||||||
let toolIsValid = this.checkIfValidTool(selectedTool);
|
let toolIsValid = this.checkIfValidTool(selectedTool);
|
||||||
if (match && !toolIsValid) {
|
if (match && !toolIsValid) {
|
||||||
console.log(
|
logger.debug(
|
||||||
'\n\n<----------------Tool invalid: Re-assigning Selected Tool---------------->\n\n',
|
'\n\n<----------------[CustomOutputParser] Tool invalid: Re-assigning Selected Tool---------------->\n\n' +
|
||||||
match,
|
match,
|
||||||
);
|
);
|
||||||
selectedTool = this.getValidTool(selectedTool);
|
selectedTool = this.getValidTool(selectedTool);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (match && !selectedTool) {
|
if (match && !selectedTool) {
|
||||||
console.log(
|
logger.debug(
|
||||||
'\n\n<----------------------HIT INVALID TOOL PARSING ERROR---------------------->\n\n',
|
'\n\n<----------------------[CustomOutputParser] HIT INVALID TOOL PARSING ERROR---------------------->\n\n' +
|
||||||
match,
|
match,
|
||||||
);
|
);
|
||||||
selectedTool = 'self-reflection';
|
selectedTool = 'self-reflection';
|
||||||
}
|
}
|
||||||
|
|
||||||
if (match && !match[2]) {
|
if (match && !match[2]) {
|
||||||
console.log(
|
logger.debug(
|
||||||
'\n\n<----------------------HIT NO ACTION INPUT PARSING ERROR---------------------->\n\n',
|
'\n\n<----------------------[CustomOutputParser] HIT NO ACTION INPUT PARSING ERROR---------------------->\n\n' +
|
||||||
match,
|
match,
|
||||||
);
|
);
|
||||||
|
|
||||||
// In case there is no action input, let's double-check if there is an action input in 'text' variable
|
// In case there is no action input, let's double-check if there is an action input in 'text' variable
|
||||||
|
|
@ -139,7 +140,9 @@ class CustomOutputParser extends ZeroShotAgentOutputParser {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (match && selectedTool.length > this.longestToolName.length) {
|
if (match && selectedTool.length > this.longestToolName.length) {
|
||||||
console.log('\n\n<----------------------HIT LONG PARSING ERROR---------------------->\n\n');
|
logger.debug(
|
||||||
|
'\n\n<----------------------[CustomOutputParser] HIT LONG PARSING ERROR---------------------->\n\n',
|
||||||
|
);
|
||||||
|
|
||||||
let action, input, thought;
|
let action, input, thought;
|
||||||
let firstIndex = Infinity;
|
let firstIndex = Infinity;
|
||||||
|
|
@ -156,9 +159,9 @@ class CustomOutputParser extends ZeroShotAgentOutputParser {
|
||||||
// In case there is no action input, let's double-check if there is an action input in 'text' variable
|
// In case there is no action input, let's double-check if there is an action input in 'text' variable
|
||||||
const actionInputMatch = this.actionInputRegex.exec(text);
|
const actionInputMatch = this.actionInputRegex.exec(text);
|
||||||
if (action && actionInputMatch) {
|
if (action && actionInputMatch) {
|
||||||
console.log(
|
logger.debug(
|
||||||
'\n\n<------Matched Action Input in Long Parsing Error------>\n\n',
|
'\n\n<------[CustomOutputParser] Matched Action Input in Long Parsing Error------>\n\n' +
|
||||||
actionInputMatch,
|
actionInputMatch,
|
||||||
);
|
);
|
||||||
return {
|
return {
|
||||||
tool: action,
|
tool: action,
|
||||||
|
|
@ -185,15 +188,14 @@ class CustomOutputParser extends ZeroShotAgentOutputParser {
|
||||||
|
|
||||||
const inputMatch = this.actionValues.exec(returnValues.log); //new
|
const inputMatch = this.actionValues.exec(returnValues.log); //new
|
||||||
if (inputMatch) {
|
if (inputMatch) {
|
||||||
console.log('inputMatch');
|
logger.debug('[CustomOutputParser] inputMatch', inputMatch);
|
||||||
console.dir(inputMatch, { depth: null });
|
|
||||||
returnValues.toolInput = inputMatch[1].replaceAll('"', '').trim();
|
returnValues.toolInput = inputMatch[1].replaceAll('"', '').trim();
|
||||||
returnValues.log = returnValues.log.replace(this.actionValues, '');
|
returnValues.log = returnValues.log.replace(this.actionValues, '');
|
||||||
}
|
}
|
||||||
|
|
||||||
return returnValues;
|
return returnValues;
|
||||||
} else {
|
} else {
|
||||||
console.log('No valid tool mentioned.', this.tools, text);
|
logger.debug('[CustomOutputParser] No valid tool mentioned.', this.tools, text);
|
||||||
return {
|
return {
|
||||||
tool: 'self-reflection',
|
tool: 'self-reflection',
|
||||||
toolInput: 'Hypothetical actions: \n"' + text + '"\n',
|
toolInput: 'Hypothetical actions: \n"' + text + '"\n',
|
||||||
|
|
@ -202,8 +204,8 @@ class CustomOutputParser extends ZeroShotAgentOutputParser {
|
||||||
}
|
}
|
||||||
|
|
||||||
// if (action && input) {
|
// if (action && input) {
|
||||||
// console.log('Action:', action);
|
// logger.debug('Action:', action);
|
||||||
// console.log('Input:', input);
|
// logger.debug('Input:', input);
|
||||||
// }
|
// }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -7,6 +7,8 @@ const {
|
||||||
SystemMessagePromptTemplate,
|
SystemMessagePromptTemplate,
|
||||||
HumanMessagePromptTemplate,
|
HumanMessagePromptTemplate,
|
||||||
} = require('langchain/prompts');
|
} = require('langchain/prompts');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const PREFIX = 'You are a helpful AI assistant.';
|
const PREFIX = 'You are a helpful AI assistant.';
|
||||||
|
|
||||||
function parseOutput(message) {
|
function parseOutput(message) {
|
||||||
|
|
@ -112,7 +114,7 @@ class FunctionsAgent extends Agent {
|
||||||
valuesForLLM,
|
valuesForLLM,
|
||||||
callbackManager,
|
callbackManager,
|
||||||
);
|
);
|
||||||
console.log('message', message);
|
logger.debug('[FunctionsAgent] plan message', message);
|
||||||
return parseOutput(message);
|
return parseOutput(message);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,7 @@ const { EModelEndpoint } = require('librechat-data-provider');
|
||||||
const { formatFromLangChain } = require('~/app/clients/prompts');
|
const { formatFromLangChain } = require('~/app/clients/prompts');
|
||||||
const checkBalance = require('~/models/checkBalance');
|
const checkBalance = require('~/models/checkBalance');
|
||||||
const { isEnabled } = require('~/server/utils');
|
const { isEnabled } = require('~/server/utils');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const createStartHandler = ({
|
const createStartHandler = ({
|
||||||
context,
|
context,
|
||||||
|
|
@ -16,9 +17,15 @@ const createStartHandler = ({
|
||||||
const { model, functions, function_call } = invocation_params;
|
const { model, functions, function_call } = invocation_params;
|
||||||
const messages = _messages[0].map(formatFromLangChain);
|
const messages = _messages[0].map(formatFromLangChain);
|
||||||
|
|
||||||
if (manager.debug) {
|
logger.debug(`[createStartHandler] handleChatModelStart: ${context}`, {
|
||||||
console.log(`handleChatModelStart: ${context}`);
|
model,
|
||||||
console.dir({ model, functions, function_call }, { depth: null });
|
function_call,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (context !== 'title') {
|
||||||
|
logger.debug(`[createStartHandler] handleChatModelStart: ${context}`, {
|
||||||
|
functions,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const payload = { messages };
|
const payload = { messages };
|
||||||
|
|
@ -35,9 +42,10 @@ const createStartHandler = ({
|
||||||
}
|
}
|
||||||
|
|
||||||
prelimPromptTokens += promptTokensEstimate(payload);
|
prelimPromptTokens += promptTokensEstimate(payload);
|
||||||
if (manager.debug) {
|
logger.debug('[createStartHandler]', {
|
||||||
console.log('Prelim Prompt Tokens & Token Buffer', prelimPromptTokens, tokenBuffer);
|
prelimPromptTokens,
|
||||||
}
|
tokenBuffer,
|
||||||
|
});
|
||||||
prelimPromptTokens += tokenBuffer;
|
prelimPromptTokens += tokenBuffer;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
|
@ -61,7 +69,7 @@ const createStartHandler = ({
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error(`[${context}] checkBalance error`, err);
|
logger.error(`[createStartHandler][${context}] checkBalance error`, err);
|
||||||
manager.abortController.abort();
|
manager.abortController.abort();
|
||||||
if (context === 'summary' || context === 'plugins') {
|
if (context === 'summary' || context === 'plugins') {
|
||||||
manager.addRun(runId, { conversationId, error: err.message });
|
manager.addRun(runId, { conversationId, error: err.message });
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
const { z } = require('zod');
|
const { z } = require('zod');
|
||||||
const { langPrompt, createTitlePrompt, escapeBraces, getSnippet } = require('../prompts');
|
const { langPrompt, createTitlePrompt, escapeBraces, getSnippet } = require('../prompts');
|
||||||
const { createStructuredOutputChainFromZod } = require('langchain/chains/openai_functions');
|
const { createStructuredOutputChainFromZod } = require('langchain/chains/openai_functions');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const langSchema = z.object({
|
const langSchema = z.object({
|
||||||
language: z.string().describe('The language of the input text (full noun, no abbreviations).'),
|
language: z.string().describe('The language of the input text (full noun, no abbreviations).'),
|
||||||
|
|
@ -30,8 +31,7 @@ const runTitleChain = async ({ llm, text, convo, signal, callbacks }) => {
|
||||||
try {
|
try {
|
||||||
snippet = getSnippet(text);
|
snippet = getSnippet(text);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
-    console.log('Error getting snippet of text for titleChain');
-    console.log(e);
+    logger.error('[runTitleChain] Error getting snippet of text for titleChain', e);
}
|
}
|
||||||
const languageChain = createLanguageChain({ llm, callbacks });
|
const languageChain = createLanguageChain({ llm, callbacks });
|
||||||
const titleChain = createTitleChain({ llm, callbacks, convo: escapeBraces(convo) });
|
const titleChain = createTitleChain({ llm, callbacks, convo: escapeBraces(convo) });
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
const { createStartHandler } = require('../callbacks');
|
const { createStartHandler } = require('~/app/clients/callbacks');
|
||||||
const spendTokens = require('../../../models/spendTokens');
|
const spendTokens = require('~/models/spendTokens');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
class RunManager {
|
class RunManager {
|
||||||
constructor(fields) {
|
constructor(fields) {
|
||||||
|
|
@ -35,7 +36,7 @@ class RunManager {
|
||||||
if (this.runs.has(runId)) {
|
if (this.runs.has(runId)) {
|
||||||
this.runs.delete(runId);
|
this.runs.delete(runId);
|
||||||
} else {
|
} else {
|
||||||
console.error(`Run with ID ${runId} does not exist.`);
|
logger.error(`[api/app/clients/llm/RunManager] Run with ID ${runId} does not exist.`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -57,10 +58,19 @@ class RunManager {
|
||||||
{
|
{
|
||||||
handleChatModelStart: createStartHandler({ ...metadata, manager: this }),
|
handleChatModelStart: createStartHandler({ ...metadata, manager: this }),
|
||||||
handleLLMEnd: async (output, runId, _parentRunId) => {
|
handleLLMEnd: async (output, runId, _parentRunId) => {
|
||||||
-        if (this.debug) {
-          console.log(`handleLLMEnd: ${JSON.stringify(metadata)}`);
-          console.dir({ output, runId, _parentRunId }, { depth: null });
+        const { llmOutput, ..._output } = output;
+        logger.debug(`[RunManager] handleLLMEnd: ${JSON.stringify(metadata)}`, {
+          runId,
+          _parentRunId,
+          llmOutput,
+        });
+
+        if (metadata.context !== 'title') {
+          logger.debug('[RunManager] handleLLMEnd:', {
+            output: _output,
+          });
}
|
}
|
||||||
|
|
||||||
const { tokenUsage } = output.llmOutput;
|
const { tokenUsage } = output.llmOutput;
|
||||||
const run = this.getRunById(runId);
|
const run = this.getRunById(runId);
|
||||||
this.removeRun(runId);
|
this.removeRun(runId);
|
||||||
|
|
@ -74,8 +84,7 @@ class RunManager {
|
||||||
await spendTokens(txData, tokenUsage);
|
await spendTokens(txData, tokenUsage);
|
||||||
},
|
},
|
||||||
handleLLMError: async (err) => {
|
handleLLMError: async (err) => {
|
||||||
-        this.debug && console.log(`handleLLMError: ${JSON.stringify(metadata)}`);
-        this.debug && console.error(err);
+        logger.error(`[RunManager] handleLLMError: ${JSON.stringify(metadata)}`, err);
if (metadata.context === 'title') {
|
if (metadata.context === 'title') {
|
||||||
return;
|
return;
|
||||||
} else if (metadata.context === 'plugins') {
|
} else if (metadata.context === 'plugins') {
|
||||||
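The `handleLLMEnd` handler above reads `tokenUsage` off `output.llmOutput` before calling `spendTokens`. For reference, the LangChain LLM result that reaches this callback typically has a shape like the following (OpenAI-style usage fields; the numbers are placeholders):

// Illustrative shape of the `output` argument handleLLMEnd receives
const output = {
  generations: [[{ text: '...model reply...' }]],
  llmOutput: {
    tokenUsage: {
      promptTokens: 412,
      completionTokens: 87,
      totalTokens: 499,
    },
  },
};
// The handler destructures llmOutput, logs the rest, then calls spendTokens(txData, tokenUsage).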
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
const { ConversationSummaryBufferMemory, ChatMessageHistory } = require('langchain/memory');
|
const { ConversationSummaryBufferMemory, ChatMessageHistory } = require('langchain/memory');
|
||||||
const { formatLangChainMessages, SUMMARY_PROMPT } = require('../prompts');
|
const { formatLangChainMessages, SUMMARY_PROMPT } = require('../prompts');
|
||||||
const { predictNewSummary } = require('../chains');
|
const { predictNewSummary } = require('../chains');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const createSummaryBufferMemory = ({ llm, prompt, messages, ...rest }) => {
|
const createSummaryBufferMemory = ({ llm, prompt, messages, ...rest }) => {
|
||||||
const chatHistory = new ChatMessageHistory(messages);
|
const chatHistory = new ChatMessageHistory(messages);
|
||||||
|
|
@ -22,9 +23,8 @@ const summaryBuffer = async ({
|
||||||
prompt = SUMMARY_PROMPT,
|
prompt = SUMMARY_PROMPT,
|
||||||
signal,
|
signal,
|
||||||
}) => {
|
}) => {
|
||||||
-  if (debug && previous_summary) {
-    console.log('<-----------PREVIOUS SUMMARY----------->\n\n');
-    console.log(previous_summary);
+  if (previous_summary) {
+    logger.debug('[summaryBuffer]', { previous_summary });
}
|
}
|
||||||
|
|
||||||
const formattedMessages = formatLangChainMessages(context, formatOptions);
|
const formattedMessages = formatLangChainMessages(context, formatOptions);
|
||||||
|
|
@ -46,8 +46,7 @@ const summaryBuffer = async ({
|
||||||
const messages = await chatPromptMemory.chatHistory.getMessages();
|
const messages = await chatPromptMemory.chatHistory.getMessages();
|
||||||
|
|
||||||
if (debug) {
|
if (debug) {
|
||||||
-    console.log('<-----------SUMMARY BUFFER MESSAGES----------->\n\n');
-    console.log(JSON.stringify(messages));
+    logger.debug('[summaryBuffer]', { summary_buffer_messages: messages.length });
}
|
}
|
||||||
|
|
||||||
const predictSummary = await predictNewSummary({
|
const predictSummary = await predictNewSummary({
|
||||||
|
|
@ -58,8 +57,7 @@ const summaryBuffer = async ({
|
||||||
});
|
});
|
||||||
|
|
||||||
if (debug) {
|
if (debug) {
|
||||||
-    console.log('<-----------SUMMARY----------->\n\n');
-    console.log(JSON.stringify(predictSummary));
+    logger.debug('[summaryBuffer]', { summary: predictSummary });
}
|
}
|
||||||
|
|
||||||
return { role: 'system', content: predictSummary };
|
return { role: 'system', content: predictSummary };
|
||||||
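A hypothetical call to `summaryBuffer` for orientation — only `prompt`, `signal`, `previous_summary`, and the return shape are visible in this hunk; the other field names are assumptions:

const summaryMessage = await summaryBuffer({
  llm,                        // a LangChain chat model
  context: messagesToSummarize, // prior conversation messages to fold into the summary
  previous_summary,           // last stored summary, if any
  formatOptions: {},
  signal: abortController.signal,
});
// => { role: 'system', content: '<updated running summary>' }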
|
|
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The `addImages` function corrects any erroneous image URLs in the `responseMessage.text`
|
* The `addImages` function corrects any erroneous image URLs in the `responseMessage.text`
|
||||||
* and appends image observations from `intermediateSteps` if they are not already present.
|
* and appends image observations from `intermediateSteps` if they are not already present.
|
||||||
|
|
@ -20,7 +22,7 @@
|
||||||
*
|
*
|
||||||
* addImages(intermediateSteps, responseMessage);
|
* addImages(intermediateSteps, responseMessage);
|
||||||
*
|
*
|
||||||
* console.log(responseMessage.text);
|
* logger.debug(responseMessage.text);
|
||||||
* // Outputs: 'Some text with \n'
|
* // Outputs: 'Some text with \n'
|
||||||
*
|
*
|
||||||
* @returns {void}
|
* @returns {void}
|
||||||
|
|
@ -62,7 +64,7 @@ function addImages(intermediateSteps, responseMessage) {
|
||||||
if (observedImagePath && !responseMessage.text.includes(observedImagePath[0])) {
|
if (observedImagePath && !responseMessage.text.includes(observedImagePath[0])) {
|
||||||
responseMessage.text += '\n' + observation;
|
responseMessage.text += '\n' + observation;
|
||||||
if (process.env.DEBUG_PLUGINS) {
|
if (process.env.DEBUG_PLUGINS) {
|
||||||
console.debug('[addImages] added image from intermediateSteps');
|
logger.debug('[addImages] added image from intermediateSteps:', observation);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
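An illustrative input/output pair for `addImages`, consistent with the JSDoc above (the observation string and paths are placeholders):

const intermediateSteps = [
  { observation: '![generated image](/images/img-abc123.png)' },
];
const responseMessage = { text: 'Here is the image you asked for.' };

addImages(intermediateSteps, responseMessage);
// responseMessage.text now ends with '\n![generated image](/images/img-abc123.png)'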
|
|
|
||||||
|
|
@ -1,238 +0,0 @@
|
||||||
const { Tool } = require('langchain/tools');
|
|
||||||
const yaml = require('js-yaml');
|
|
||||||
|
|
||||||
/*
|
|
||||||
export interface AIPluginToolParams {
|
|
||||||
name: string;
|
|
||||||
description: string;
|
|
||||||
apiSpec: string;
|
|
||||||
openaiSpec: string;
|
|
||||||
model: BaseLanguageModel;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface PathParameter {
|
|
||||||
name: string;
|
|
||||||
description: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface Info {
|
|
||||||
title: string;
|
|
||||||
description: string;
|
|
||||||
version: string;
|
|
||||||
}
|
|
||||||
export interface PathMethod {
|
|
||||||
summary: string;
|
|
||||||
operationId: string;
|
|
||||||
parameters?: PathParameter[];
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ApiSpec {
|
|
||||||
openapi: string;
|
|
||||||
info: Info;
|
|
||||||
paths: { [key: string]: { [key: string]: PathMethod } };
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
function isJson(str) {
|
|
||||||
try {
|
|
||||||
JSON.parse(str);
|
|
||||||
} catch (e) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
function convertJsonToYamlIfApplicable(spec) {
|
|
||||||
if (isJson(spec)) {
|
|
||||||
const jsonData = JSON.parse(spec);
|
|
||||||
return yaml.dump(jsonData);
|
|
||||||
}
|
|
||||||
return spec;
|
|
||||||
}
|
|
||||||
|
|
||||||
function extractShortVersion(openapiSpec) {
|
|
||||||
openapiSpec = convertJsonToYamlIfApplicable(openapiSpec);
|
|
||||||
try {
|
|
||||||
const fullApiSpec = yaml.load(openapiSpec);
|
|
||||||
const shortApiSpec = {
|
|
||||||
openapi: fullApiSpec.openapi,
|
|
||||||
info: fullApiSpec.info,
|
|
||||||
paths: {},
|
|
||||||
};
|
|
||||||
|
|
||||||
for (let path in fullApiSpec.paths) {
|
|
||||||
shortApiSpec.paths[path] = {};
|
|
||||||
for (let method in fullApiSpec.paths[path]) {
|
|
||||||
shortApiSpec.paths[path][method] = {
|
|
||||||
summary: fullApiSpec.paths[path][method].summary,
|
|
||||||
operationId: fullApiSpec.paths[path][method].operationId,
|
|
||||||
parameters: fullApiSpec.paths[path][method].parameters?.map((parameter) => ({
|
|
||||||
name: parameter.name,
|
|
||||||
description: parameter.description,
|
|
||||||
})),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return yaml.dump(shortApiSpec);
|
|
||||||
} catch (e) {
|
|
||||||
console.log(e);
|
|
||||||
return '';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function printOperationDetails(operationId, openapiSpec) {
|
|
||||||
openapiSpec = convertJsonToYamlIfApplicable(openapiSpec);
|
|
||||||
let returnText = '';
|
|
||||||
try {
|
|
||||||
let doc = yaml.load(openapiSpec);
|
|
||||||
let servers = doc.servers;
|
|
||||||
let paths = doc.paths;
|
|
||||||
let components = doc.components;
|
|
||||||
|
|
||||||
for (let path in paths) {
|
|
||||||
for (let method in paths[path]) {
|
|
||||||
let operation = paths[path][method];
|
|
||||||
if (operation.operationId === operationId) {
|
|
||||||
returnText += `The API request to do for operationId "${operationId}" is:\n`;
|
|
||||||
returnText += `Method: ${method.toUpperCase()}\n`;
|
|
||||||
|
|
||||||
let url = servers[0].url + path;
|
|
||||||
returnText += `Path: ${url}\n`;
|
|
||||||
|
|
||||||
returnText += 'Parameters:\n';
|
|
||||||
if (operation.parameters) {
|
|
||||||
for (let param of operation.parameters) {
|
|
||||||
let required = param.required ? '' : ' (optional),';
|
|
||||||
returnText += `- ${param.name} (${param.in},${required} ${param.schema.type}): ${param.description}\n`;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
returnText += ' None\n';
|
|
||||||
}
|
|
||||||
returnText += '\n';
|
|
||||||
|
|
||||||
let responseSchema = operation.responses['200'].content['application/json'].schema;
|
|
||||||
|
|
||||||
// Check if schema is a reference
|
|
||||||
if (responseSchema.$ref) {
|
|
||||||
// Extract schema name from reference
|
|
||||||
let schemaName = responseSchema.$ref.split('/').pop();
|
|
||||||
// Look up schema in components
|
|
||||||
responseSchema = components.schemas[schemaName];
|
|
||||||
}
|
|
||||||
|
|
||||||
returnText += 'Response schema:\n';
|
|
||||||
returnText += '- Type: ' + responseSchema.type + '\n';
|
|
||||||
returnText += '- Additional properties:\n';
|
|
||||||
returnText += ' - Type: ' + responseSchema.additionalProperties?.type + '\n';
|
|
||||||
if (responseSchema.additionalProperties?.properties) {
|
|
||||||
returnText += ' - Properties:\n';
|
|
||||||
for (let prop in responseSchema.additionalProperties.properties) {
|
|
||||||
returnText += ` - ${prop} (${responseSchema.additionalProperties.properties[prop].type}): Description not provided in OpenAPI spec\n`;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (returnText === '') {
|
|
||||||
returnText += `No operation with operationId "${operationId}" found.`;
|
|
||||||
}
|
|
||||||
return returnText;
|
|
||||||
} catch (e) {
|
|
||||||
console.log(e);
|
|
||||||
return '';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class AIPluginTool extends Tool {
|
|
||||||
/*
|
|
||||||
private _name: string;
|
|
||||||
private _description: string;
|
|
||||||
apiSpec: string;
|
|
||||||
openaiSpec: string;
|
|
||||||
model: BaseLanguageModel;
|
|
||||||
*/
|
|
||||||
|
|
||||||
get name() {
|
|
||||||
return this._name;
|
|
||||||
}
|
|
||||||
|
|
||||||
get description() {
|
|
||||||
return this._description;
|
|
||||||
}
|
|
||||||
|
|
||||||
constructor(params) {
|
|
||||||
super();
|
|
||||||
this._name = params.name;
|
|
||||||
this._description = params.description;
|
|
||||||
this.apiSpec = params.apiSpec;
|
|
||||||
this.openaiSpec = params.openaiSpec;
|
|
||||||
this.model = params.model;
|
|
||||||
}
|
|
||||||
|
|
||||||
async _call(input) {
|
|
||||||
let date = new Date();
|
|
||||||
let fullDate = `Date: ${date.getDate()}/${
|
|
||||||
date.getMonth() + 1
|
|
||||||
}/${date.getFullYear()}, Time: ${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}`;
|
|
||||||
const prompt = `${fullDate}\nQuestion: ${input} \n${this.apiSpec}.`;
|
|
||||||
console.log(prompt);
|
|
||||||
const gptResponse = await this.model.predict(prompt);
|
|
||||||
let operationId = gptResponse.match(/operationId: (.*)/)?.[1];
|
|
||||||
if (!operationId) {
|
|
||||||
return 'No operationId found in the response';
|
|
||||||
}
|
|
||||||
if (operationId == 'No API path found to answer the question') {
|
|
||||||
return 'No API path found to answer the question';
|
|
||||||
}
|
|
||||||
|
|
||||||
let openApiData = printOperationDetails(operationId, this.openaiSpec);
|
|
||||||
|
|
||||||
return openApiData;
|
|
||||||
}
|
|
||||||
|
|
||||||
static async fromPluginUrl(url, model) {
|
|
||||||
const aiPluginRes = await fetch(url, {});
|
|
||||||
if (!aiPluginRes.ok) {
|
|
||||||
throw new Error(`Failed to fetch plugin from ${url} with status ${aiPluginRes.status}`);
|
|
||||||
}
|
|
||||||
const aiPluginJson = await aiPluginRes.json();
|
|
||||||
const apiUrlRes = await fetch(aiPluginJson.api.url, {});
|
|
||||||
if (!apiUrlRes.ok) {
|
|
||||||
throw new Error(
|
|
||||||
`Failed to fetch API spec from ${aiPluginJson.api.url} with status ${apiUrlRes.status}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
const apiUrlJson = await apiUrlRes.text();
|
|
||||||
const shortApiSpec = extractShortVersion(apiUrlJson);
|
|
||||||
return new AIPluginTool({
|
|
||||||
name: aiPluginJson.name_for_model.toLowerCase(),
|
|
||||||
description: `A \`tool\` to learn the API documentation for ${aiPluginJson.name_for_model.toLowerCase()}, after which you can use 'http_request' to make the actual API call. Short description of how to use the API's results: ${
|
|
||||||
aiPluginJson.description_for_model
|
|
||||||
})`,
|
|
||||||
apiSpec: `
|
|
||||||
As an AI, your task is to identify the operationId of the relevant API path based on the condensed OpenAPI specifications provided.
|
|
||||||
|
|
||||||
Please note:
|
|
||||||
|
|
||||||
1. Do not imagine URLs. Only use the information provided in the condensed OpenAPI specifications.
|
|
||||||
|
|
||||||
2. Do not guess the operationId. Identify it strictly based on the API paths and their descriptions.
|
|
||||||
|
|
||||||
Your output should only include:
|
|
||||||
- operationId: The operationId of the relevant API path
|
|
||||||
|
|
||||||
If you cannot find a suitable API path based on the OpenAPI specifications, please answer only "operationId: No API path found to answer the question".
|
|
||||||
|
|
||||||
Now, based on the question above and the condensed OpenAPI specifications given below, identify the operationId:
|
|
||||||
|
|
||||||
\`\`\`
|
|
||||||
${shortApiSpec}
|
|
||||||
\`\`\`
|
|
||||||
`,
|
|
||||||
openaiSpec: apiUrlJson,
|
|
||||||
model: model,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = AIPluginTool;
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
const { StructuredTool } = require('langchain/tools');
|
|
||||||
const { z } = require('zod');
|
const { z } = require('zod');
|
||||||
|
const { StructuredTool } = require('langchain/tools');
|
||||||
const { SearchClient, AzureKeyCredential } = require('@azure/search-documents');
|
const { SearchClient, AzureKeyCredential } = require('@azure/search-documents');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
class AzureAISearch extends StructuredTool {
|
class AzureAISearch extends StructuredTool {
|
||||||
// Constants for default values
|
// Constants for default values
|
||||||
|
|
@ -94,7 +95,7 @@ class AzureAISearch extends StructuredTool {
|
||||||
}
|
}
|
||||||
return JSON.stringify(resultDocuments);
|
return JSON.stringify(resultDocuments);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(`Azure AI Search request failed: ${error.message}`);
|
logger.error('Azure AI Search request failed', error);
|
||||||
return 'There was an error with Azure AI Search.';
|
return 'There was an error with Azure AI Search.';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -3,13 +3,14 @@
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
const OpenAI = require('openai');
|
const OpenAI = require('openai');
|
||||||
// const { genAzureEndpoint } = require('../../../utils/genAzureEndpoints');
|
// const { genAzureEndpoint } = require('~/utils/genAzureEndpoints');
|
||||||
const { Tool } = require('langchain/tools');
|
const { Tool } = require('langchain/tools');
|
||||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||||
|
const extractBaseURL = require('~/utils/extractBaseURL');
|
||||||
const saveImageFromUrl = require('./saveImageFromUrl');
|
const saveImageFromUrl = require('./saveImageFromUrl');
|
||||||
const extractBaseURL = require('../../../utils/extractBaseURL');
|
const { logger } = require('~/config');
|
||||||
const { DALLE_REVERSE_PROXY, PROXY } = process.env;
|
|
||||||
|
|
||||||
|
const { DALLE_REVERSE_PROXY, PROXY } = process.env;
|
||||||
class OpenAICreateImage extends Tool {
|
class OpenAICreateImage extends Tool {
|
||||||
constructor(fields = {}) {
|
constructor(fields = {}) {
|
||||||
super();
|
super();
|
||||||
|
|
@ -102,9 +103,12 @@ Guidelines:
|
||||||
|
|
||||||
if (match) {
|
if (match) {
|
||||||
imageName = match[0];
|
imageName = match[0];
|
||||||
console.log(imageName); // Output: img-lgCf7ppcbhqQrz6a5ear6FOb.png
|
logger.debug('[DALL-E]', { imageName }); // Output: img-lgCf7ppcbhqQrz6a5ear6FOb.png
|
||||||
} else {
|
} else {
|
||||||
console.log('No image name found in the string.');
|
logger.debug('[DALL-E] No image name found in the string.', {
|
||||||
|
theImageUrl,
|
||||||
|
data: resp.data[0],
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
this.outputPath = path.resolve(__dirname, '..', '..', '..', '..', 'client', 'public', 'images');
|
this.outputPath = path.resolve(__dirname, '..', '..', '..', '..', 'client', 'public', 'images');
|
||||||
|
|
@ -120,7 +124,7 @@ Guidelines:
|
||||||
await saveImageFromUrl(theImageUrl, this.outputPath, imageName);
|
await saveImageFromUrl(theImageUrl, this.outputPath, imageName);
|
||||||
this.result = this.getMarkdownImageUrl(imageName);
|
this.result = this.getMarkdownImageUrl(imageName);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Error while saving the image:', error);
|
logger.error('Error while saving the DALL-E image:', error);
|
||||||
this.result = theImageUrl;
|
this.result = theImageUrl;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
const { Tool } = require('langchain/tools');
|
|
||||||
const { google } = require('googleapis');
|
const { google } = require('googleapis');
|
||||||
|
const { Tool } = require('langchain/tools');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Represents a tool that allows an agent to use the Google Custom Search API.
|
* Represents a tool that allows an agent to use the Google Custom Search API.
|
||||||
|
|
@ -86,7 +87,7 @@ class GoogleSearchAPI extends Tool {
|
||||||
});
|
});
|
||||||
|
|
||||||
// return response.data;
|
// return response.data;
|
||||||
// console.log(response.data);
|
// logger.debug(response.data);
|
||||||
|
|
||||||
if (!response.data.items || response.data.items.length === 0) {
|
if (!response.data.items || response.data.items.length === 0) {
|
||||||
return this.resultsToReadableFormat([
|
return this.resultsToReadableFormat([
|
||||||
|
|
@ -110,7 +111,7 @@ class GoogleSearchAPI extends Tool {
|
||||||
|
|
||||||
return this.resultsToReadableFormat(metadataResults);
|
return this.resultsToReadableFormat(metadataResults);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.log(`Error searching Google: ${error}`);
|
logger.error('[GoogleSearchAPI]', error);
|
||||||
// throw error;
|
// throw error;
|
||||||
return 'There was an error searching Google.';
|
return 'There was an error searching Google.';
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,108 +0,0 @@
|
||||||
const { Tool } = require('langchain/tools');
|
|
||||||
|
|
||||||
// class RequestsGetTool extends Tool {
|
|
||||||
// constructor(headers = {}, { maxOutputLength } = {}) {
|
|
||||||
// super();
|
|
||||||
// this.name = 'requests_get';
|
|
||||||
// this.headers = headers;
|
|
||||||
// this.maxOutputLength = maxOutputLength || 2000;
|
|
||||||
// this.description = `A portal to the internet. Use this when you need to get specific content from a website.
|
|
||||||
// - Input should be a url (i.e. https://www.google.com). The output will be the text response of the GET request.`;
|
|
||||||
// }
|
|
||||||
|
|
||||||
// async _call(input) {
|
|
||||||
// const res = await fetch(input, {
|
|
||||||
// headers: this.headers
|
|
||||||
// });
|
|
||||||
// const text = await res.text();
|
|
||||||
// return text.slice(0, this.maxOutputLength);
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
|
|
||||||
// class RequestsPostTool extends Tool {
|
|
||||||
// constructor(headers = {}, { maxOutputLength } = {}) {
|
|
||||||
// super();
|
|
||||||
// this.name = 'requests_post';
|
|
||||||
// this.headers = headers;
|
|
||||||
// this.maxOutputLength = maxOutputLength || Infinity;
|
|
||||||
// this.description = `Use this when you want to POST to a website.
|
|
||||||
// - Input should be a json string with two keys: "url" and "data".
|
|
||||||
// - The value of "url" should be a string, and the value of "data" should be a dictionary of
|
|
||||||
// - key-value pairs you want to POST to the url as a JSON body.
|
|
||||||
// - Be careful to always use double quotes for strings in the json string
|
|
||||||
// - The output will be the text response of the POST request.`;
|
|
||||||
// }
|
|
||||||
|
|
||||||
// async _call(input) {
|
|
||||||
// try {
|
|
||||||
// const { url, data } = JSON.parse(input);
|
|
||||||
// const res = await fetch(url, {
|
|
||||||
// method: 'POST',
|
|
||||||
// headers: this.headers,
|
|
||||||
// body: JSON.stringify(data)
|
|
||||||
// });
|
|
||||||
// const text = await res.text();
|
|
||||||
// return text.slice(0, this.maxOutputLength);
|
|
||||||
// } catch (error) {
|
|
||||||
// return `${error}`;
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
|
|
||||||
class HttpRequestTool extends Tool {
|
|
||||||
constructor(headers = {}, { maxOutputLength = Infinity } = {}) {
|
|
||||||
super();
|
|
||||||
this.headers = headers;
|
|
||||||
this.name = 'http_request';
|
|
||||||
this.maxOutputLength = maxOutputLength;
|
|
||||||
this.description =
|
|
||||||
'Executes HTTP methods (GET, POST, PUT, DELETE, etc.). The input is an object with three keys: "url", "method", and "data". Even for GET or DELETE, include "data" key as an empty string. "method" is the HTTP method, and "url" is the desired endpoint. If POST or PUT, "data" should contain a stringified JSON representing the body to send. Only one url per use.';
|
|
||||||
}
|
|
||||||
|
|
||||||
async _call(input) {
|
|
||||||
try {
|
|
||||||
const urlPattern = /"url":\s*"([^"]*)"/;
|
|
||||||
const methodPattern = /"method":\s*"([^"]*)"/;
|
|
||||||
const dataPattern = /"data":\s*"([^"]*)"/;
|
|
||||||
|
|
||||||
const url = input.match(urlPattern)[1];
|
|
||||||
const method = input.match(methodPattern)[1];
|
|
||||||
let data = input.match(dataPattern)[1];
|
|
||||||
|
|
||||||
// Parse 'data' back to JSON if possible
|
|
||||||
try {
|
|
||||||
data = JSON.parse(data);
|
|
||||||
} catch (e) {
|
|
||||||
// If it's not a JSON string, keep it as is
|
|
||||||
}
|
|
||||||
|
|
||||||
let options = {
|
|
||||||
method: method,
|
|
||||||
headers: this.headers,
|
|
||||||
};
|
|
||||||
|
|
||||||
if (['POST', 'PUT', 'PATCH'].includes(method.toUpperCase()) && data) {
|
|
||||||
if (typeof data === 'object') {
|
|
||||||
options.body = JSON.stringify(data);
|
|
||||||
} else {
|
|
||||||
options.body = data;
|
|
||||||
}
|
|
||||||
options.headers['Content-Type'] = 'application/json';
|
|
||||||
}
|
|
||||||
|
|
||||||
const res = await fetch(url, options);
|
|
||||||
|
|
||||||
const text = await res.text();
|
|
||||||
if (text.includes('<html')) {
|
|
||||||
return 'This tool is not designed to browse web pages. Only use it for API calls.';
|
|
||||||
}
|
|
||||||
|
|
||||||
return text.slice(0, this.maxOutputLength);
|
|
||||||
} catch (error) {
|
|
||||||
console.log(error);
|
|
||||||
return `${error}`;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = HttpRequestTool;
|
|
||||||
|
|
@ -1,9 +1,10 @@
|
||||||
// Generates image using stable diffusion webui's api (automatic1111)
|
// Generates image using stable diffusion webui's api (automatic1111)
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const { Tool } = require('langchain/tools');
|
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
const axios = require('axios');
|
const axios = require('axios');
|
||||||
const sharp = require('sharp');
|
const sharp = require('sharp');
|
||||||
|
const { Tool } = require('langchain/tools');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
class StableDiffusionAPI extends Tool {
|
class StableDiffusionAPI extends Tool {
|
||||||
constructor(fields) {
|
constructor(fields) {
|
||||||
|
|
@ -81,7 +82,7 @@ Guidelines:
|
||||||
.toFile(this.outputPath + '/' + imageName);
|
.toFile(this.outputPath + '/' + imageName);
|
||||||
this.result = this.getMarkdownImageUrl(imageName);
|
this.result = this.getMarkdownImageUrl(imageName);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Error while saving the image:', error);
|
logger.error('[StableDiffusion] Error while saving the image:', error);
|
||||||
// this.result = theImageUrl;
|
// this.result = theImageUrl;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
/* eslint-disable no-useless-escape */
|
/* eslint-disable no-useless-escape */
|
||||||
const axios = require('axios');
|
const axios = require('axios');
|
||||||
const { Tool } = require('langchain/tools');
|
const { Tool } = require('langchain/tools');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
class WolframAlphaAPI extends Tool {
|
class WolframAlphaAPI extends Tool {
|
||||||
constructor(fields) {
|
constructor(fields) {
|
||||||
|
|
@ -38,7 +39,7 @@ General guidelines:
|
||||||
const response = await axios.get(url, { responseType: 'text' });
|
const response = await axios.get(url, { responseType: 'text' });
|
||||||
return response.data;
|
return response.data;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(`Error fetching raw text: ${error}`);
|
logger.error('[WolframAlphaAPI] Error fetching raw text:', error);
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -68,11 +69,10 @@ General guidelines:
|
||||||
return response;
|
return response;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error.response && error.response.data) {
|
if (error.response && error.response.data) {
|
||||||
console.log('Error data:', error.response.data);
|
logger.error('[WolframAlphaAPI] Error data:', error);
|
||||||
return error.response.data;
|
return error.response.data;
|
||||||
} else {
|
} else {
|
||||||
console.log('Error querying Wolfram Alpha', error.message);
|
logger.error('[WolframAlphaAPI] Error querying Wolfram Alpha', error);
|
||||||
// throw error;
|
|
||||||
return 'There was an error querying Wolfram Alpha.';
|
return 'There was an error querying Wolfram Alpha.';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,12 @@
|
||||||
require('dotenv').config();
|
require('dotenv').config();
|
||||||
const { z } = require('zod');
|
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const yaml = require('js-yaml');
|
const { z } = require('zod');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
const { DynamicStructuredTool } = require('langchain/tools');
|
const yaml = require('js-yaml');
|
||||||
const { createOpenAPIChain } = require('langchain/chains');
|
const { createOpenAPIChain } = require('langchain/chains');
|
||||||
|
const { DynamicStructuredTool } = require('langchain/tools');
|
||||||
const { ChatPromptTemplate, HumanMessagePromptTemplate } = require('langchain/prompts');
|
const { ChatPromptTemplate, HumanMessagePromptTemplate } = require('langchain/prompts');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
function addLinePrefix(text, prefix = '// ') {
|
function addLinePrefix(text, prefix = '// ') {
|
||||||
return text
|
return text
|
||||||
|
|
@ -52,7 +53,7 @@ async function readSpecFile(filePath) {
|
||||||
}
|
}
|
||||||
return yaml.load(fileContents);
|
return yaml.load(fileContents);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.error(e);
|
logger.error('[readSpecFile] error', e);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -83,54 +84,51 @@ async function getSpec(url) {
|
||||||
return ValidSpecPath.parse(url);
|
return ValidSpecPath.parse(url);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function createOpenAPIPlugin({ data, llm, user, message, memory, signal, verbose = false }) {
|
async function createOpenAPIPlugin({ data, llm, user, message, memory, signal }) {
|
||||||
let spec;
|
let spec;
|
||||||
try {
|
try {
|
||||||
spec = await getSpec(data.api.url, verbose);
|
spec = await getSpec(data.api.url);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
verbose && console.debug('getSpec error', error);
|
logger.error('[createOpenAPIPlugin] getSpec error', error);
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!spec) {
|
if (!spec) {
|
||||||
verbose && console.debug('No spec found');
|
logger.warn('[createOpenAPIPlugin] No spec found');
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
const headers = {};
|
const headers = {};
|
||||||
const { auth, name_for_model, description_for_model, description_for_human } = data;
|
const { auth, name_for_model, description_for_model, description_for_human } = data;
|
||||||
if (auth && AuthDefinition.parse(auth)) {
|
if (auth && AuthDefinition.parse(auth)) {
|
||||||
verbose && console.debug('auth detected', auth);
|
logger.debug('[createOpenAPIPlugin] auth detected', auth);
|
||||||
const { openai } = auth.verification_tokens;
|
const { openai } = auth.verification_tokens;
|
||||||
if (AuthBearer.parse(auth)) {
|
if (AuthBearer.parse(auth)) {
|
||||||
headers.authorization = `Bearer ${openai}`;
|
headers.authorization = `Bearer ${openai}`;
|
||||||
verbose && console.debug('added auth bearer', headers);
|
logger.debug('[createOpenAPIPlugin] added auth bearer', headers);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
-  const chainOptions = {
-    llm,
-    verbose,
-  };
+  const chainOptions = { llm };
|
|
||||||
if (data.headers && data.headers['librechat_user_id']) {
|
if (data.headers && data.headers['librechat_user_id']) {
|
||||||
verbose && console.debug('id detected', headers);
|
logger.debug('[createOpenAPIPlugin] id detected', headers);
|
||||||
headers[data.headers['librechat_user_id']] = user;
|
headers[data.headers['librechat_user_id']] = user;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (Object.keys(headers).length > 0) {
|
if (Object.keys(headers).length > 0) {
|
||||||
verbose && console.debug('headers detected', headers);
|
logger.debug('[createOpenAPIPlugin] headers detected', headers);
|
||||||
chainOptions.headers = headers;
|
chainOptions.headers = headers;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (data.params) {
|
if (data.params) {
|
||||||
verbose && console.debug('params detected', data.params);
|
logger.debug('[createOpenAPIPlugin] params detected', data.params);
|
||||||
chainOptions.params = data.params;
|
chainOptions.params = data.params;
|
||||||
}
|
}
|
||||||
|
|
||||||
let history = '';
|
let history = '';
|
||||||
if (memory) {
|
if (memory) {
|
||||||
verbose && console.debug('openAPI chain: memory detected', memory);
|
logger.debug('[createOpenAPIPlugin] openAPI chain: memory detected', memory);
|
||||||
const { history: chat_history } = await memory.loadMemoryVariables({});
|
const { history: chat_history } = await memory.loadMemoryVariables({});
|
||||||
history = chat_history?.length > 0 ? `\n\n## Chat History:\n${chat_history}\n` : '';
|
history = chat_history?.length > 0 ? `\n\n## Chat History:\n${chat_history}\n` : '';
|
||||||
}
|
}
|
||||||
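The `chainOptions` built above (llm, optional headers and params) feed `createOpenAPIChain` from 'langchain/chains'. The call site is outside this hunk; an assumed sketch of how it is typically used:

// Assumed downstream use of the options assembled above
const chain = await createOpenAPIChain(spec, chainOptions);
const result = await chain.run(`${message}${history}`);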
|
|
|
||||||
|
|
@ -1,6 +1,4 @@
|
||||||
const GoogleSearchAPI = require('./GoogleSearch');
|
const GoogleSearchAPI = require('./GoogleSearch');
|
||||||
const HttpRequestTool = require('./HttpRequestTool');
|
|
||||||
const AIPluginTool = require('./AIPluginTool');
|
|
||||||
const OpenAICreateImage = require('./DALL-E');
|
const OpenAICreateImage = require('./DALL-E');
|
||||||
const DALLE3 = require('./structured/DALLE3');
|
const DALLE3 = require('./structured/DALLE3');
|
||||||
const StructuredSD = require('./structured/StableDiffusion');
|
const StructuredSD = require('./structured/StableDiffusion');
|
||||||
|
|
@ -20,8 +18,6 @@ const CodeBrew = require('./CodeBrew');
|
||||||
module.exports = {
|
module.exports = {
|
||||||
availableTools,
|
availableTools,
|
||||||
GoogleSearchAPI,
|
GoogleSearchAPI,
|
||||||
HttpRequestTool,
|
|
||||||
AIPluginTool,
|
|
||||||
OpenAICreateImage,
|
OpenAICreateImage,
|
||||||
DALLE3,
|
DALLE3,
|
||||||
StableDiffusionAPI,
|
StableDiffusionAPI,
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
const axios = require('axios');
|
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
|
const axios = require('axios');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
async function saveImageFromUrl(url, outputPath, outputFilename) {
|
async function saveImageFromUrl(url, outputPath, outputFilename) {
|
||||||
try {
|
try {
|
||||||
|
|
@ -32,7 +33,7 @@ async function saveImageFromUrl(url, outputPath, outputFilename) {
|
||||||
writer.on('error', reject);
|
writer.on('error', reject);
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Error while saving the image:', error);
|
logger.error('[saveImageFromUrl] Error while saving the image:', error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
const { StructuredTool } = require('langchain/tools');
|
|
||||||
const { z } = require('zod');
|
const { z } = require('zod');
|
||||||
|
const { StructuredTool } = require('langchain/tools');
|
||||||
const { SearchClient, AzureKeyCredential } = require('@azure/search-documents');
|
const { SearchClient, AzureKeyCredential } = require('@azure/search-documents');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
class AzureAISearch extends StructuredTool {
|
class AzureAISearch extends StructuredTool {
|
||||||
// Constants for default values
|
// Constants for default values
|
||||||
|
|
@ -94,7 +95,7 @@ class AzureAISearch extends StructuredTool {
|
||||||
}
|
}
|
||||||
return JSON.stringify(resultDocuments);
|
return JSON.stringify(resultDocuments);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(`Azure AI Search request failed: ${error.message}`);
|
logger.error('Azure AI Search request failed', error);
|
||||||
return 'There was an error with Azure AI Search.';
|
return 'There was an error with Azure AI Search.';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -28,14 +28,14 @@ class RunCode extends StructuredTool {
|
||||||
}
|
}
|
||||||
|
|
||||||
async _call({ code, language = 'python' }) {
|
async _call({ code, language = 'python' }) {
|
||||||
// console.log('<--------------- Running Code --------------->', { code, language });
|
// logger.debug('<--------------- Running Code --------------->', { code, language });
|
||||||
const response = await axios({
|
const response = await axios({
|
||||||
url: `${this.url}/repl`,
|
url: `${this.url}/repl`,
|
||||||
method: 'post',
|
method: 'post',
|
||||||
headers: this.headers,
|
headers: this.headers,
|
||||||
data: { code, language },
|
data: { code, language },
|
||||||
});
|
});
|
||||||
// console.log('<--------------- Sucessfully ran Code --------------->', response.data);
|
// logger.debug('<--------------- Successfully ran Code --------------->', response.data);
|
||||||
return response.data.result;
|
return response.data.result;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -42,14 +42,14 @@ class RunCode extends StructuredTool {
|
||||||
}
|
}
|
||||||
|
|
||||||
async _call({ code, language = 'python' }) {
|
async _call({ code, language = 'python' }) {
|
||||||
// console.log('<--------------- Running Code --------------->', { code, language });
|
// logger.debug('<--------------- Running Code --------------->', { code, language });
|
||||||
const response = await axios({
|
const response = await axios({
|
||||||
url: `${this.url}/repl`,
|
url: `${this.url}/repl`,
|
||||||
method: 'post',
|
method: 'post',
|
||||||
headers: this.headers,
|
headers: this.headers,
|
||||||
data: { code, language },
|
data: { code, language },
|
||||||
});
|
});
|
||||||
// console.log('<--------------- Sucessfully ran Code --------------->', response.data);
|
// logger.debug('<--------------- Successfully ran Code --------------->', response.data);
|
||||||
return response.data.result;
|
return response.data.result;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,9 @@ const OpenAI = require('openai');
|
||||||
const { Tool } = require('langchain/tools');
|
const { Tool } = require('langchain/tools');
|
||||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||||
const saveImageFromUrl = require('../saveImageFromUrl');
|
const saveImageFromUrl = require('../saveImageFromUrl');
|
||||||
const extractBaseURL = require('../../../../utils/extractBaseURL');
|
const extractBaseURL = require('~/utils/extractBaseURL');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const { DALLE3_SYSTEM_PROMPT, DALLE_REVERSE_PROXY, PROXY } = process.env;
|
const { DALLE3_SYSTEM_PROMPT, DALLE_REVERSE_PROXY, PROXY } = process.env;
|
||||||
class DALLE3 extends Tool {
|
class DALLE3 extends Tool {
|
||||||
constructor(fields = {}) {
|
constructor(fields = {}) {
|
||||||
|
|
@ -126,9 +128,12 @@ Error Message: ${error.message}`;
|
||||||
|
|
||||||
if (match) {
|
if (match) {
|
||||||
imageName = match[0];
|
imageName = match[0];
|
||||||
console.log(imageName); // Output: img-lgCf7ppcbhqQrz6a5ear6FOb.png
|
logger.debug('[DALL-E-3]', { imageName }); // Output: img-lgCf7ppcbhqQrz6a5ear6FOb.png
|
||||||
} else {
|
} else {
|
||||||
console.log('No image name found in the string.');
|
logger.debug('[DALL-E-3] No image name found in the string.', {
|
||||||
|
theImageUrl,
|
||||||
|
data: resp.data[0],
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
this.outputPath = path.resolve(
|
this.outputPath = path.resolve(
|
||||||
|
|
@ -154,7 +159,7 @@ Error Message: ${error.message}`;
|
||||||
await saveImageFromUrl(theImageUrl, this.outputPath, imageName);
|
await saveImageFromUrl(theImageUrl, this.outputPath, imageName);
|
||||||
this.result = this.getMarkdownImageUrl(imageName);
|
this.result = this.getMarkdownImageUrl(imageName);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Error while saving the image:', error);
|
logger.error('Error while saving the image:', error);
|
||||||
this.result = theImageUrl;
|
this.result = theImageUrl;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,10 @@
|
||||||
|
const { z } = require('zod');
|
||||||
|
const axios = require('axios');
|
||||||
const { StructuredTool } = require('langchain/tools');
|
const { StructuredTool } = require('langchain/tools');
|
||||||
const { PromptTemplate } = require('langchain/prompts');
|
const { PromptTemplate } = require('langchain/prompts');
|
||||||
const { createExtractionChainFromZod } = require('./extractionChain');
|
|
||||||
// const { ChatOpenAI } = require('langchain/chat_models/openai');
|
// const { ChatOpenAI } = require('langchain/chat_models/openai');
|
||||||
const axios = require('axios');
|
const { createExtractionChainFromZod } = require('./extractionChain');
|
||||||
const { z } = require('zod');
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const envs = ['Nodejs', 'Go', 'Bash', 'Rust', 'Python3', 'PHP', 'Java', 'Perl', 'DotNET'];
|
const envs = ['Nodejs', 'Go', 'Bash', 'Rust', 'Python3', 'PHP', 'Java', 'Perl', 'DotNET'];
|
||||||
const env = z.enum(envs);
|
const env = z.enum(envs);
|
||||||
|
|
@ -34,8 +35,8 @@ async function extractEnvFromCode(code, model) {
|
||||||
// const chatModel = new ChatOpenAI({ openAIApiKey, modelName: 'gpt-4-0613', temperature: 0 });
|
// const chatModel = new ChatOpenAI({ openAIApiKey, modelName: 'gpt-4-0613', temperature: 0 });
|
||||||
const chain = createExtractionChainFromZod(zodSchema, model, { prompt, verbose: true });
|
const chain = createExtractionChainFromZod(zodSchema, model, { prompt, verbose: true });
|
||||||
const result = await chain.run(code);
|
const result = await chain.run(code);
|
||||||
console.log('<--------------- extractEnvFromCode --------------->');
|
logger.debug('<--------------- extractEnvFromCode --------------->');
|
||||||
console.log(result);
|
logger.debug(result);
|
||||||
return result.env;
|
return result.env;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -69,7 +70,7 @@ class RunCommand extends StructuredTool {
|
||||||
}
|
}
|
||||||
|
|
||||||
async _call(data) {
|
async _call(data) {
|
||||||
console.log(`<--------------- Running ${data} --------------->`);
|
logger.debug(`<--------------- Running ${data} --------------->`);
|
||||||
const response = await axios({
|
const response = await axios({
|
||||||
url: `${this.url}/commands`,
|
url: `${this.url}/commands`,
|
||||||
method: 'post',
|
method: 'post',
|
||||||
|
|
@ -96,7 +97,7 @@ class ReadFile extends StructuredTool {
|
||||||
}
|
}
|
||||||
|
|
||||||
async _call(data) {
|
async _call(data) {
|
||||||
console.log(`<--------------- Reading ${data} --------------->`);
|
logger.debug(`<--------------- Reading ${data} --------------->`);
|
||||||
const response = await axios.get(`${this.url}/files`, { params: data, headers: this.headers });
|
const response = await axios.get(`${this.url}/files`, { params: data, headers: this.headers });
|
||||||
return response.data;
|
return response.data;
|
||||||
}
|
}
|
||||||
|
|
@ -121,12 +122,12 @@ class WriteFile extends StructuredTool {
|
||||||
|
|
||||||
async _call(data) {
|
async _call(data) {
|
||||||
let { env, path, content } = data;
|
let { env, path, content } = data;
|
||||||
console.log(`<--------------- environment ${env} typeof ${typeof env}--------------->`);
|
logger.debug(`<--------------- environment ${env} typeof ${typeof env}--------------->`);
|
||||||
if (env && !envs.includes(env)) {
|
if (env && !envs.includes(env)) {
|
||||||
console.log(`<--------------- Invalid environment ${env} --------------->`);
|
logger.debug(`<--------------- Invalid environment ${env} --------------->`);
|
||||||
env = await extractEnvFromCode(content, this.model);
|
env = await extractEnvFromCode(content, this.model);
|
||||||
} else if (!env) {
|
} else if (!env) {
|
||||||
console.log('<--------------- Undefined environment --------------->');
|
logger.debug('<--------------- Undefined environment --------------->');
|
||||||
env = await extractEnvFromCode(content, this.model);
|
env = await extractEnvFromCode(content, this.model);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -139,7 +140,7 @@ class WriteFile extends StructuredTool {
|
||||||
content,
|
content,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
console.log('Writing to file', JSON.stringify(payload));
|
logger.debug('Writing to file', JSON.stringify(payload));
|
||||||
|
|
||||||
await axios({
|
await axios({
|
||||||
url: `${this.url}/files`,
|
url: `${this.url}/files`,
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,11 @@
|
||||||
// Generates image using stable diffusion webui's api (automatic1111)
|
// Generates image using stable diffusion webui's api (automatic1111)
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const { StructuredTool } = require('langchain/tools');
|
|
||||||
const { z } = require('zod');
|
const { z } = require('zod');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
const axios = require('axios');
|
const axios = require('axios');
|
||||||
const sharp = require('sharp');
|
const sharp = require('sharp');
|
||||||
|
const { StructuredTool } = require('langchain/tools');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
class StableDiffusionAPI extends StructuredTool {
|
class StableDiffusionAPI extends StructuredTool {
|
||||||
constructor(fields) {
|
constructor(fields) {
|
||||||
|
|
@ -107,7 +108,7 @@ class StableDiffusionAPI extends StructuredTool {
|
||||||
.toFile(this.outputPath + '/' + imageName);
|
.toFile(this.outputPath + '/' + imageName);
|
||||||
this.result = this.getMarkdownImageUrl(imageName);
|
this.result = this.getMarkdownImageUrl(imageName);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Error while saving the image:', error);
|
logger.error('[StableDiffusion] Error while saving the image:', error);
|
||||||
// this.result = theImageUrl;
|
// this.result = theImageUrl;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,8 @@
|
||||||
/* eslint-disable no-useless-escape */
|
/* eslint-disable no-useless-escape */
|
||||||
const axios = require('axios');
|
const axios = require('axios');
|
||||||
const { StructuredTool } = require('langchain/tools');
|
|
||||||
const { z } = require('zod');
|
const { z } = require('zod');
|
||||||
|
const { StructuredTool } = require('langchain/tools');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
class WolframAlphaAPI extends StructuredTool {
|
class WolframAlphaAPI extends StructuredTool {
|
||||||
constructor(fields) {
|
constructor(fields) {
|
||||||
|
|
@ -47,7 +48,7 @@ class WolframAlphaAPI extends StructuredTool {
|
||||||
const response = await axios.get(url, { responseType: 'text' });
|
const response = await axios.get(url, { responseType: 'text' });
|
||||||
return response.data;
|
return response.data;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error(`Error fetching raw text: ${error}`);
|
logger.error('[WolframAlphaAPI] Error fetching raw text:', error);
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -78,11 +79,10 @@ class WolframAlphaAPI extends StructuredTool {
|
||||||
return response;
|
return response;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error.response && error.response.data) {
|
if (error.response && error.response.data) {
|
||||||
console.log('Error data:', error.response.data);
|
logger.error('[WolframAlphaAPI] Error data:', error);
|
||||||
return error.response.data;
|
return error.response.data;
|
||||||
} else {
|
} else {
|
||||||
console.log('Error querying Wolfram Alpha', error.message);
|
logger.error('[WolframAlphaAPI] Error querying Wolfram Alpha', error);
|
||||||
// throw error;
|
|
||||||
return 'There was an error querying Wolfram Alpha.';
|
return 'There was an error querying Wolfram Alpha.';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,7 @@ const path = require('path');
|
||||||
const OpenAI = require('openai');
|
const OpenAI = require('openai');
|
||||||
const DALLE3 = require('../DALLE3');
|
const DALLE3 = require('../DALLE3');
|
||||||
const saveImageFromUrl = require('../../saveImageFromUrl');
|
const saveImageFromUrl = require('../../saveImageFromUrl');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
jest.mock('openai');
|
jest.mock('openai');
|
||||||
|
|
||||||
|
|
@ -145,10 +146,13 @@ describe('DALLE3', () => {
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
console.log = jest.fn(); // Mock console.log
|
|
||||||
generate.mockResolvedValue(mockResponse);
|
generate.mockResolvedValue(mockResponse);
|
||||||
await dalle._call(mockData);
|
await dalle._call(mockData);
|
||||||
expect(console.log).toHaveBeenCalledWith('No image name found in the string.');
|
expect(logger.debug).toHaveBeenCalledWith('[DALL-E-3] No image name found in the string.', {
|
||||||
|
data: { url: 'http://example.com/invalid-url' },
|
||||||
|
theImageUrl: 'http://example.com/invalid-url',
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should create the directory if it does not exist', async () => {
|
it('should create the directory if it does not exist', async () => {
|
||||||
|
|
@ -182,9 +186,8 @@ describe('DALLE3', () => {
|
||||||
const error = new Error('Error while saving the image');
|
const error = new Error('Error while saving the image');
|
||||||
generate.mockResolvedValue(mockResponse);
|
generate.mockResolvedValue(mockResponse);
|
||||||
saveImageFromUrl.mockRejectedValue(error);
|
saveImageFromUrl.mockRejectedValue(error);
|
||||||
console.error = jest.fn(); // Mock console.error
|
|
||||||
const result = await dalle._call(mockData);
|
const result = await dalle._call(mockData);
|
||||||
expect(console.error).toHaveBeenCalledWith('Error while saving the image:', error);
|
expect(logger.error).toHaveBeenCalledWith('Error while saving the image:', error);
|
||||||
expect(result).toBe(mockResponse.data[0].url);
|
expect(result).toBe(mockResponse.data[0].url);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
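These assertions check calls on the shared `logger` from `~/config`, which generally only works if that module is mocked in the spec. A sketch of the mock that would typically sit near the top of this test file (the exact module path depends on the project's jest moduleNameMapper):

jest.mock('~/config', () => ({
  logger: {
    info: jest.fn(),
    warn: jest.fn(),
    debug: jest.fn(),
    error: jest.fn(),
  },
}));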
|
|
|
||||||
|
|
@ -1,4 +1,5 @@
|
||||||
const OpenAI = require('openai');
|
const OpenAI = require('openai');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Handles errors that may occur when making requests to OpenAI's API.
|
* Handles errors that may occur when making requests to OpenAI's API.
|
||||||
|
|
@ -12,14 +13,14 @@ const OpenAI = require('openai');
|
||||||
*/
|
*/
|
||||||
async function handleOpenAIErrors(err, errorCallback, context = 'stream') {
|
async function handleOpenAIErrors(err, errorCallback, context = 'stream') {
|
||||||
if (err instanceof OpenAI.APIError && err?.message?.includes('abort')) {
|
if (err instanceof OpenAI.APIError && err?.message?.includes('abort')) {
|
||||||
console.warn(`[OpenAIClient.chatCompletion][${context}] Aborted Message`);
|
logger.warn(`[OpenAIClient.chatCompletion][${context}] Aborted Message`);
|
||||||
}
|
}
|
||||||
if (err instanceof OpenAI.OpenAIError && err?.message?.includes('missing finish_reason')) {
|
if (err instanceof OpenAI.OpenAIError && err?.message?.includes('missing finish_reason')) {
|
||||||
console.warn(`[OpenAIClient.chatCompletion][${context}] Missing finish_reason`);
|
logger.warn(`[OpenAIClient.chatCompletion][${context}] Missing finish_reason`);
|
||||||
} else if (err instanceof OpenAI.APIError) {
|
} else if (err instanceof OpenAI.APIError) {
|
||||||
console.warn(`[OpenAIClient.chatCompletion][${context}] API Error`);
|
logger.warn(`[OpenAIClient.chatCompletion][${context}] API error`);
|
||||||
} else {
|
} else {
|
||||||
console.warn(`[OpenAIClient.chatCompletion][${context}] Unhandled error type`);
|
logger.warn(`[OpenAIClient.chatCompletion][${context}] Unhandled error type`);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (errorCallback) {
|
if (errorCallback) {
|
||||||
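A sketch of a call site for `handleOpenAIErrors`, assuming the OpenAI v4 Node SDK streaming API; the `errorCallback` wiring and surrounding variables are assumptions, not code from this commit:

try {
  const stream = await openai.chat.completions.create({ model, messages, stream: true });
  for await (const chunk of stream) {
    onProgress(chunk.choices[0]?.delta?.content ?? '');
  }
} catch (err) {
  handleOpenAIErrors(err, (error) => abortController.abort(error), 'stream');
}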
|
|
|
||||||
|
|
@@ -1,17 +1,14 @@
-const { getUserPluginAuthValue } = require('../../../../server/services/PluginService');
-const { OpenAIEmbeddings } = require('langchain/embeddings/openai');
 const { ZapierToolKit } = require('langchain/agents');
-const { SerpAPI, ZapierNLAWrapper } = require('langchain/tools');
-const { ChatOpenAI } = require('langchain/chat_models/openai');
 const { Calculator } = require('langchain/tools/calculator');
 const { WebBrowser } = require('langchain/tools/webbrowser');
+const { SerpAPI, ZapierNLAWrapper } = require('langchain/tools');
+const { OpenAIEmbeddings } = require('langchain/embeddings/openai');
+const { getUserPluginAuthValue } = require('~/server/services/PluginService');
 const {
   availableTools,
-  AIPluginTool,
   GoogleSearchAPI,
   WolframAlphaAPI,
   StructuredWolfram,
-  HttpRequestTool,
   OpenAICreateImage,
   StableDiffusionAPI,
   DALLE3,
@@ -23,8 +20,9 @@ const {
   CodeSherpaTools,
   CodeBrew,
 } = require('../');
-const { loadSpecs } = require('./loadSpecs');
 const { loadToolSuite } = require('./loadToolSuite');
+const { loadSpecs } = require('./loadSpecs');
+const { logger } = require('~/config');

 const getOpenAIKey = async (options, user) => {
   let openAIApiKey = options.openAIApiKey ?? process.env.OPENAI_API_KEY;
@@ -64,7 +62,7 @@ const validateTools = async (user, tools = []) => {

     return Array.from(validToolsSet.values());
   } catch (err) {
-    console.log('There was a problem validating tools', err);
+    logger.error('[validateTools] There was a problem validating tools', err);
     throw new Error(err);
   }
 };
@@ -161,15 +159,6 @@ const loadTools = async ({
       const zapier = new ZapierNLAWrapper({ apiKey });
       return ZapierToolKit.fromZapierNLAWrapper(zapier);
     },
-    plugins: async () => {
-      return [
-        new HttpRequestTool(),
-        await AIPluginTool.fromPluginUrl(
-          'https://www.klarna.com/.well-known/ai-plugin.json',
-          new ChatOpenAI({ openAIApiKey: options.openAIApiKey, temperature: 0 }),
-        ),
-      ];
-    },
   };

   const requestedTools = {};

@@ -1,7 +1,8 @@
 const fs = require('fs');
 const path = require('path');
 const { z } = require('zod');
-const { createOpenAPIPlugin } = require('../dynamic/OpenAPIPlugin');
+const { logger } = require('~/config');
+const { createOpenAPIPlugin } = require('~/app/clients/tools/dynamic/OpenAPIPlugin');

 // The minimum Manifest definition
 const ManifestDefinition = z.object({
@@ -26,28 +27,17 @@ const ManifestDefinition = z.object({
   legal_info_url: z.string().optional(),
 });

-function validateJson(json, verbose = true) {
+function validateJson(json) {
   try {
     return ManifestDefinition.parse(json);
   } catch (error) {
-    if (verbose) {
-      console.debug('validateJson error', error);
-    }
+    logger.debug('[validateJson] manifest parsing error', error);
     return false;
   }
 }

 // omit the LLM to return the well known jsons as objects
-async function loadSpecs({
-  llm,
-  user,
-  message,
-  tools = [],
-  map = false,
-  memory,
-  signal,
-  verbose = false,
-}) {
+async function loadSpecs({ llm, user, message, tools = [], map = false, memory, signal }) {
   const directoryPath = path.join(__dirname, '..', '.well-known');
   let files = [];
@@ -60,7 +50,7 @@ async function loadSpecs({
       await fs.promises.access(filePath, fs.constants.F_OK);
       files.push(tools[i] + '.json');
     } catch (err) {
-      console.error(`File ${tools[i] + '.json'} does not exist`);
+      logger.error(`[loadSpecs] File ${tools[i] + '.json'} does not exist`, err);
     }
   }
@@ -73,9 +63,7 @@ async function loadSpecs({
   const validJsons = [];
   const constructorMap = {};

-  if (verbose) {
-    console.debug('files', files);
-  }
+  logger.debug('[validateJson] files', files);

   for (const file of files) {
     if (path.extname(file) === '.json') {
@@ -84,7 +72,7 @@ async function loadSpecs({
       const json = JSON.parse(fileContent);

       if (!validateJson(json)) {
-        verbose && console.debug('Invalid json', json);
+        logger.debug('[validateJson] Invalid json', json);
         continue;
       }
@@ -97,13 +85,12 @@ async function loadSpecs({
           memory,
           signal,
           user,
-          verbose,
         });
         continue;
       }

       if (llm) {
-        validJsons.push(createOpenAPIPlugin({ data: json, llm, verbose }));
+        validJsons.push(createOpenAPIPlugin({ data: json, llm }));
         continue;
       }
@@ -117,10 +104,8 @@ async function loadSpecs({

   const plugins = (await Promise.all(validJsons)).filter((plugin) => plugin);

-  // if (verbose) {
-  //   console.debug('plugins', plugins);
-  //   console.debug(plugins[0].name);
-  // }
+  // logger.debug('[validateJson] plugins', plugins);
+  // logger.debug(plugins[0].name);

   return plugins;
 }

@@ -1,5 +1,6 @@
-const { isEnabled } = require('../server/utils');
 const throttle = require('lodash/throttle');
+const { isEnabled } = require('~/server/utils');
+const { logger } = require('~/config');

 const titleConvo = async ({ text, response }) => {
   let title = 'New Chat';
@@ -30,11 +31,10 @@ const titleConvo = async ({ text, response }) => {
     const res = await titleGenerator.sendMessage(titlePrompt, options);
     title = res.response.replace(/Title: /, '').replace(/[".]/g, '');
   } catch (e) {
-    console.error(e);
-    console.log('There was an issue generating title, see error above');
+    logger.error('There was an issue generating title with BingAI', e);
   }

-  console.log('CONVERSATION TITLE', title);
+  logger.debug('[/ask/bingAI] CONVERSATION TITLE: ' + title);
   return title;
 };

api/cache/banViolation.js (vendored): 8 changes

@@ -1,6 +1,8 @@
-const Session = require('../models/Session');
+const Session = require('~/models/Session');
 const getLogStores = require('./getLogStores');
-const { isEnabled, math, removePorts } = require('../server/utils');
+const { isEnabled, math, removePorts } = require('~/server/utils');
+const { logger } = require('~/config');

 const { BAN_VIOLATIONS, BAN_INTERVAL } = process.env ?? {};
 const interval = math(BAN_INTERVAL, 20);
@@ -54,7 +56,7 @@ const banViolation = async (req, res, errorMessage) => {
   }

   req.ip = removePorts(req);
-  console.log(
+  logger.info(
     `[BAN] Banning user ${user_id} ${req.ip ? `@ ${req.ip} ` : ''}for ${
       duration / 1000 / 60
     } minutes`,

api/cache/keyvMongo.js (vendored): 4 changes

@@ -1,7 +1,9 @@
 const KeyvMongo = require('@keyv/mongo');
+const { logger } = require('~/config');

 const { MONGO_URI } = process.env ?? {};

 const keyvMongo = new KeyvMongo(MONGO_URI, { collection: 'logs' });
-keyvMongo.on('error', (err) => console.error('KeyvMongo connection error:', err));
+keyvMongo.on('error', (err) => logger.error('KeyvMongo connection error:', err));

 module.exports = keyvMongo;

api/cache/keyvRedis.js (vendored): 5 changes

@@ -1,4 +1,5 @@
 const KeyvRedis = require('@keyv/redis');
+const { logger } = require('~/config');

 const { REDIS_URI } = process.env;

@@ -6,9 +7,9 @@ let keyvRedis;

 if (REDIS_URI) {
   keyvRedis = new KeyvRedis(REDIS_URI, { useRedisSets: false });
-  keyvRedis.on('error', (err) => console.error('KeyvRedis connection error:', err));
+  keyvRedis.on('error', (err) => logger.error('KeyvRedis connection error:', err));
 } else {
-  // console.log('REDIS_URI not provided. Redis module will not be initialized.');
+  logger.info('REDIS_URI not provided. Redis module will not be initialized.');
 }

 module.exports = keyvRedis;

@@ -1,12 +1,14 @@
 /**
  * @typedef {Object} CacheKeys
  * @property {'config'} CONFIG - Key for the config cache.
+ * @property {'plugins'} PLUGINS - Key for the plugins cache.
  * @property {'modelsConfig'} MODELS_CONFIG - Key for the model config cache.
  * @property {'defaultConfig'} DEFAULT_CONFIG - Key for the default config cache.
  * @property {'overrideConfig'} OVERRIDE_CONFIG - Key for the override config cache.
  */
 const CacheKeys = {
   CONFIG: 'config',
+  PLUGINS: 'plugins',
   MODELS_CONFIG: 'modelsConfig',
   DEFAULT_CONFIG: 'defaultConfig',
   OVERRIDE_CONFIG: 'overrideConfig',

@@ -1,6 +0,0 @@
-const path = require('path');
-
-module.exports = {
-  publicPath: path.resolve(__dirname, '..', 'client', 'public'),
-  imageOutput: path.resolve(__dirname, '..', 'client', 'public', 'images'),
-};

api/config/index.js (new file): 5 lines

@@ -0,0 +1,5 @@
+const logger = require('./winston');
+
+module.exports = {
+  logger,
+};
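
With this barrel file in place, any module under api/ can pull in the shared Winston instance through the `~` alias and log at the levels configured in winston.js further down. A minimal usage sketch (not part of the commit; the `[ExampleService]` prefix and metadata values are illustrative only):

const { logger } = require('~/config');

// A plain message plus a metadata object; long strings are truncated and
// keys matching the sensitive patterns in parsers.js are redacted on output.
logger.info('[ExampleService] request received', { userId: 'someUserId' });
logger.error('[ExampleService] request failed', new Error('boom'));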
api/config/meiliLogger.js (new file): 78 lines

@@ -0,0 +1,78 @@
+const path = require('path');
+const winston = require('winston');
+require('winston-daily-rotate-file');
+
+const logDir = path.join(__dirname, '..', 'logs');
+
+const { NODE_ENV } = process.env;
+
+const levels = {
+  error: 0,
+  warn: 1,
+  info: 2,
+  http: 3,
+  verbose: 4,
+  debug: 5,
+  activity: 6,
+  silly: 7,
+};
+
+winston.addColors({
+  info: 'green', // fontStyle color
+  warn: 'italic yellow',
+  error: 'red',
+  debug: 'blue',
+});
+
+const level = () => {
+  const env = NODE_ENV || 'development';
+  const isDevelopment = env === 'development';
+  return isDevelopment ? 'debug' : 'warn';
+};
+
+const fileFormat = winston.format.combine(
+  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
+  winston.format.errors({ stack: true }),
+  winston.format.splat(),
+);
+
+const transports = [
+  new winston.transports.DailyRotateFile({
+    level: 'debug',
+    filename: `${logDir}/meiliSync-%DATE%.log`,
+    datePattern: 'YYYY-MM-DD',
+    zippedArchive: true,
+    maxSize: '20m',
+    maxFiles: '14d',
+    format: fileFormat,
+  }),
+];
+
+// if (NODE_ENV !== 'production') {
+//   transports.push(
+//     new winston.transports.Console({
+//       format: winston.format.combine(winston.format.colorize(), winston.format.simple()),
+//     }),
+//   );
+// }
+
+const consoleFormat = winston.format.combine(
+  winston.format.colorize({ all: true }),
+  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
+  winston.format.printf((info) => `${info.timestamp} ${info.level}: ${info.message}`),
+);
+
+transports.push(
+  new winston.transports.Console({
+    level: 'info',
+    format: consoleFormat,
+  }),
+);
+
+const logger = winston.createLogger({
+  level: level(),
+  levels,
+  transports,
+});
+
+module.exports = logger;

api/config/parsers.js (new file): 128 lines

@@ -0,0 +1,128 @@
+const util = require('util');
+const winston = require('winston');
+const traverse = require('traverse');
+const { klona } = require('klona/full');
+
+const sensitiveKeys = [/^sk-\w+$/];
+
+/**
+ * Determines if a given key string is sensitive.
+ *
+ * @param {string} keyStr - The key string to check.
+ * @returns {boolean} True if the key string matches known sensitive key patterns.
+ */
+function isSensitiveKey(keyStr) {
+  if (keyStr) {
+    return sensitiveKeys.some((regex) => regex.test(keyStr));
+  }
+  return false;
+}
+
+/**
+ * Recursively redacts sensitive information from an object.
+ *
+ * @param {object} obj - The object to traverse and redact.
+ */
+function redactObject(obj) {
+  traverse(obj).forEach(function redactor() {
+    if (isSensitiveKey(this.key)) {
+      this.update('[REDACTED]');
+    }
+  });
+}
+
+/**
+ * Deep copies and redacts sensitive information from an object.
+ *
+ * @param {object} obj - The object to copy and redact.
+ * @returns {object} The redacted copy of the original object.
+ */
+function redact(obj) {
+  const copy = klona(obj); // Making a deep copy to prevent side effects
+  redactObject(copy);
+
+  const splat = copy[Symbol.for('splat')];
+  redactObject(splat); // Specifically redact splat Symbol
+
+  return copy;
+}
+
+/**
+ * Truncates long strings, especially base64 image data, within log messages.
+ *
+ * @param {any} value - The value to be inspected and potentially truncated.
+ * @returns {any} - The truncated or original value.
+ */
+const truncateLongStrings = (value) => {
+  if (typeof value === 'string') {
+    return value.length > 100 ? value.substring(0, 100) + '... [truncated]' : value;
+  }
+
+  return value;
+};
+
+// /**
+//  * Processes each message in the messages array, specifically looking for and truncating
+//  * base64 image URLs in the content. If a base64 image URL is found, it replaces the URL
+//  * with a truncated message.
+//  *
+//  * @param {PayloadMessage} message - The payload message object to format.
+//  * @returns {PayloadMessage} - The processed message object with base64 image URLs truncated.
+//  */
+// const truncateBase64ImageURLs = (message) => {
+//   // Create a deep copy of the message
+//   const messageCopy = JSON.parse(JSON.stringify(message));
+
+//   if (messageCopy.content && Array.isArray(messageCopy.content)) {
+//     messageCopy.content = messageCopy.content.map(contentItem => {
+//       if (contentItem.type === 'image_url' && contentItem.image_url && isBase64String(contentItem.image_url.url)) {
+//         return { ...contentItem, image_url: { ...contentItem.image_url, url: 'Base64 Image Data... [truncated]' } };
+//       }
+//       return contentItem;
+//     });
+//   }
+//   return messageCopy;
+// };
+
+// /**
+//  * Checks if a string is a base64 image data string.
+//  *
+//  * @param {string} str - The string to be checked.
+//  * @returns {boolean} - True if the string is base64 image data, otherwise false.
+//  */
+// const isBase64String = (str) => /^data:image\/[a-zA-Z]+;base64,/.test(str);
+
+/**
+ * Custom log format for Winston that handles deep object inspection.
+ * It specifically truncates long strings and handles nested structures within metadata.
+ *
+ * @param {Object} info - Information about the log entry.
+ * @returns {string} - The formatted log message.
+ */
+const deepObjectFormat = winston.format.printf(({ level, message, timestamp, ...metadata }) => {
+  let msg = `${timestamp} ${level}: ${message}`;
+
+  if (Object.keys(metadata).length) {
+    Object.entries(metadata).forEach(([key, value]) => {
+      let val = value;
+      if (key === 'modelOptions' && value && Array.isArray(value.messages)) {
+        // Create a shallow copy of the messages array
+        // val = { ...value, messages: value.messages.map(truncateBase64ImageURLs) };
+        val = { ...value, messages: `${value.messages.length} message(s) in payload` };
+      }
+      // Inspects each metadata value; applies special handling for 'messages'
+      const inspectedValue =
+        typeof val === 'string'
+          ? truncateLongStrings(val)
+          : util.inspect(val, { depth: null, colors: false }); // Use 'val' here
+      msg += ` ${key}: ${inspectedValue}`;
+    });
+  }
+
+  return msg;
+});
+
+module.exports = {
+  redact,
+  deepObjectFormat,
+};
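
A quick illustration of the redaction behavior (an editorial sketch, not code from the commit): the patterns in sensitiveKeys are tested against object keys, so a property whose key looks like an OpenAI secret key has its value overwritten while ordinary keys pass through untouched.

const { redact } = require('~/config/parsers');

// Hypothetical metadata object: the key 'sk-abc123' matches /^sk-\w+$/,
// so its value is replaced; 'user' is left as-is.
const safe = redact({ 'sk-abc123': 'super-secret-value', user: 'someUserId' });
// => { 'sk-abc123': '[REDACTED]', user: 'someUserId' }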
api/config/paths.js (new file): 6 lines

@@ -0,0 +1,6 @@
+const path = require('path');
+
+module.exports = {
+  publicPath: path.resolve(__dirname, '..', '..', 'client', 'public'),
+  imageOutput: path.resolve(__dirname, '..', '..', 'client', 'public', 'images'),
+};

api/config/winston.js (new file): 113 lines

@@ -0,0 +1,113 @@
+const path = require('path');
+const winston = require('winston');
+require('winston-daily-rotate-file');
+const { redact, deepObjectFormat } = require('./parsers');
+const { isEnabled } = require('~/server/utils/handleText');
+
+const logDir = path.join(__dirname, '..', 'logs');
+
+const { NODE_ENV, DEBUG_LOGGING = true, DEBUG_CONSOLE = false } = process.env;
+
+const levels = {
+  error: 0,
+  warn: 1,
+  info: 2,
+  http: 3,
+  verbose: 4,
+  debug: 5,
+  activity: 6,
+  silly: 7,
+};
+
+winston.addColors({
+  info: 'green', // fontStyle color
+  warn: 'italic yellow',
+  error: 'red',
+  debug: 'blue',
+});
+
+const level = () => {
+  const env = NODE_ENV || 'development';
+  const isDevelopment = env === 'development';
+  return isDevelopment ? 'debug' : 'warn';
+};
+
+const fileFormat = winston.format.combine(
+  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
+  winston.format.errors({ stack: true }),
+  winston.format.splat(),
+  winston.format((info) => redact(info))(),
+);
+
+const transports = [
+  new winston.transports.DailyRotateFile({
+    level: 'error',
+    filename: `${logDir}/error-%DATE%.log`,
+    datePattern: 'YYYY-MM-DD',
+    zippedArchive: true,
+    maxSize: '20m',
+    maxFiles: '14d',
+    format: fileFormat,
+  }),
+  // new winston.transports.DailyRotateFile({
+  //   level: 'info',
+  //   filename: `${logDir}/info-%DATE%.log`,
+  //   datePattern: 'YYYY-MM-DD',
+  //   zippedArchive: true,
+  //   maxSize: '20m',
+  //   maxFiles: '14d',
+  // }),
+];
+
+// if (NODE_ENV !== 'production') {
+//   transports.push(
+//     new winston.transports.Console({
+//       format: winston.format.combine(winston.format.colorize(), winston.format.simple()),
+//     }),
+//   );
+// }
+
+if (isEnabled && isEnabled(DEBUG_LOGGING)) {
+  transports.push(
+    new winston.transports.DailyRotateFile({
+      level: 'debug',
+      filename: `${logDir}/debug-%DATE%.log`,
+      datePattern: 'YYYY-MM-DD',
+      zippedArchive: true,
+      maxSize: '20m',
+      maxFiles: '14d',
+      format: winston.format.combine(fileFormat, deepObjectFormat),
+    }),
+  );
+}
+
+const consoleFormat = winston.format.combine(
+  winston.format.colorize({ all: true }),
+  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
+  winston.format((info) => redact(info))(),
+  winston.format.printf((info) => `${info.timestamp} ${info.level}: ${info.message}`),
+);
+
+if (isEnabled && isEnabled(DEBUG_CONSOLE)) {
+  transports.push(
+    new winston.transports.Console({
+      level: 'debug',
+      format: winston.format.combine(consoleFormat, deepObjectFormat),
+    }),
+  );
+} else {
+  transports.push(
+    new winston.transports.Console({
+      level: 'info',
+      format: consoleFormat,
+    }),
+  );
+}
+
+const logger = winston.createLogger({
+  level: level(),
+  levels,
+  transports,
+});
+
+module.exports = logger;
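
In short: the error-%DATE%.log transport is always on, a truthy DEBUG_LOGGING adds a rotating debug-%DATE%.log transport under api/logs, and DEBUG_CONSOLE switches the console transport from 'info' to 'debug' with the deep-object formatter. A hedged sketch of the effect (the env values shown are examples, not requirements of the commit):

// .env
// DEBUG_LOGGING=true   -> debug output lands in api/logs/debug-YYYY-MM-DD.log
// DEBUG_CONSOLE=false  -> the terminal stays at the quieter 'info' level

const { logger } = require('~/config');

logger.debug('[SomeClient] building payload', { modelOptions: { messages: [] } });
// With DEBUG_CONSOLE=false this line reaches the debug log file but not the console.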
@@ -3,7 +3,11 @@ module.exports = {
   clearMocks: true,
   roots: ['<rootDir>'],
   coverageDirectory: 'coverage',
-  setupFiles: ['./test/jestSetup.js', './test/__mocks__/KeyvMongo.js'],
+  setupFiles: [
+    './test/jestSetup.js',
+    './test/__mocks__/KeyvMongo.js',
+    './test/__mocks__/logger.js',
+  ],
   moduleNameMapper: {
     '~/(.*)': '<rootDir>/$1',
   },
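
The contents of test/__mocks__/logger.js are not shown in this diff; a setup file registered here would plausibly stub the central logger so unit tests stay quiet. A hypothetical sketch only (shape assumed, not taken from the commit):

// test/__mocks__/logger.js (assumed shape)
jest.mock('~/config', () => ({
  logger: {
    error: jest.fn(),
    warn: jest.fn(),
    info: jest.fn(),
    debug: jest.fn(),
  },
}));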
@@ -1,8 +1,10 @@
-const Conversation = require('../../models/schema/convoSchema');
-const Message = require('../../models/schema/messageSchema');
 const { MeiliSearch } = require('meilisearch');
-let currentTimeout = null;
+const Message = require('~/models/schema/messageSchema');
+const Conversation = require('~/models/schema/convoSchema');
+const { logger } = require('~/config');

 const searchEnabled = process.env?.SEARCH?.toLowerCase() === 'true';
+let currentTimeout = null;

 // eslint-disable-next-line no-unused-vars
 async function indexSync(req, res, next) {
@@ -21,7 +23,7 @@ async function indexSync(req, res, next) {
   });

   const { status } = await client.health();
-  // console.log(`Meilisearch: ${status}`);
+  // logger.debug(`[indexSync] Meilisearch: ${status}`);
   const result = status === 'available' && !!process.env.SEARCH;

   if (!result) {
@@ -35,39 +37,43 @@ async function indexSync(req, res, next) {
     const messagesIndexed = messages.numberOfDocuments;
     const convosIndexed = convos.numberOfDocuments;

-    console.log(`There are ${messageCount} messages in the database, ${messagesIndexed} indexed`);
-    console.log(`There are ${convoCount} convos in the database, ${convosIndexed} indexed`);
+    logger.debug(
+      `[indexSync] There are ${messageCount} messages in the database, ${messagesIndexed} indexed`,
+    );
+    logger.debug(
+      `[indexSync] There are ${convoCount} convos in the database, ${convosIndexed} indexed`,
+    );

     if (messageCount !== messagesIndexed) {
-      console.log('Messages out of sync, indexing');
+      logger.debug('[indexSync] Messages out of sync, indexing');
       Message.syncWithMeili();
     }

     if (convoCount !== convosIndexed) {
-      console.log('Convos out of sync, indexing');
+      logger.debug('[indexSync] Convos out of sync, indexing');
       Conversation.syncWithMeili();
     }
   } catch (err) {
-    // console.log('in index sync');
+    // logger.debug('[indexSync] in index sync');
     if (err.message.includes('not found')) {
-      console.log('Creating indices...');
+      logger.debug('[indexSync] Creating indices...');
       currentTimeout = setTimeout(async () => {
         try {
           await Message.syncWithMeili();
           await Conversation.syncWithMeili();
         } catch (err) {
-          console.error('Trouble creating indices, try restarting the server.');
+          logger.error('[indexSync] Trouble creating indices, try restarting the server.', err);
         }
       }, 750);
     } else {
-      console.error(err);
+      logger.error('[indexSync] error', err);
       // res.status(500).json({ error: 'Server error' });
     }
   }
 }

 process.on('exit', () => {
-  console.log('Clearing sync timeouts before exiting...');
+  logger.debug('[indexSync] Clearing sync timeouts before exiting...');
   clearTimeout(currentTimeout);
 });

@@ -1,6 +1,7 @@
 const mongoose = require('mongoose');
 const balanceSchema = require('./schema/balance');
 const { getMultiplier } = require('./tx');
+const { logger } = require('~/config');

 balanceSchema.statics.check = async function ({
   user,
@@ -9,25 +10,21 @@ balanceSchema.statics.check = async function ({
   valueKey,
   tokenType,
   amount,
-  debug,
 }) {
   const multiplier = getMultiplier({ valueKey, tokenType, model, endpoint });
   const tokenCost = amount * multiplier;
   const { tokenCredits: balance } = (await this.findOne({ user }, 'tokenCredits').lean()) ?? {};

-  if (debug) {
-    console.log('balance check', {
-      user,
-      model,
-      endpoint,
-      valueKey,
-      tokenType,
-      amount,
-      debug,
-      balance,
-      multiplier,
-    });
-  }
+  logger.debug('[Balance.check]', {
+    user,
+    model,
+    endpoint,
+    valueKey,
+    tokenType,
+    amount,
+    balance,
+    multiplier,
+  });

   if (!balance) {
     return {
@@ -37,9 +34,7 @@ balanceSchema.statics.check = async function ({
     };
   }

-  if (debug) {
-    console.log('balance check', { tokenCost });
-  }
+  logger.debug('[Balance.check]', { tokenCost });

   return { canSpend: balance >= tokenCost, balance, tokenCost };
 };
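
For orientation (an editorial sketch, not code from the commit): callers of the static check method now get this debug output unconditionally through the central logger instead of opting in with the removed debug flag. A hedged example of the call shape, assuming the schema is exported as a Balance model (the export sits outside this hunk) and using placeholder values throughout:

const Balance = require('~/models/Balance');

const { canSpend, balance, tokenCost } = await Balance.check({
  user: 'someUserId',
  model: 'gpt-3.5-turbo',
  endpoint: 'openAI',
  valueKey: 'gpt-3.5-turbo',
  tokenType: 'prompt',
  amount: 1000,
});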
@@ -1,4 +1,6 @@
 const mongoose = require('mongoose');
+const { logger } = require('~/config');

 const major = [0, 0];
 const minor = [0, 0];
 const patch = [0, 5];
@@ -69,7 +71,7 @@ module.exports = {
     try {
       return await Config.find(filter).lean();
     } catch (error) {
-      console.error(error);
+      logger.error('Error getting configs', error);
       return { config: 'Error getting configs' };
     }
   },
@@ -77,7 +79,7 @@ module.exports = {
     try {
       return await Config.deleteMany(filter);
     } catch (error) {
-      console.error(error);
+      logger.error('Error deleting configs', error);
       return { config: 'Error deleting configs' };
     }
   },

@@ -1,12 +1,12 @@
-// const { Conversation } = require('./plugins');
 const Conversation = require('./schema/convoSchema');
 const { getMessages, deleteMessages } = require('./Message');
+const logger = require('~/config/winston');

 const getConvo = async (user, conversationId) => {
   try {
     return await Conversation.findOne({ user, conversationId }).lean();
   } catch (error) {
-    console.log(error);
+    logger.error('[getConvo] Error getting single conversation', error);
     return { message: 'Error getting single conversation' };
   }
 };
@@ -26,7 +26,7 @@ module.exports = {
         upsert: true,
       });
     } catch (error) {
-      console.log(error);
+      logger.error('[saveConvo] Error saving conversation', error);
       return { message: 'Error saving conversation' };
     }
   },
@@ -41,7 +41,7 @@ module.exports = {
         .lean();
       return { conversations: convos, pages: totalPages, pageNumber, pageSize };
     } catch (error) {
-      console.log(error);
+      logger.error('[getConvosByPage] Error getting conversations', error);
       return { message: 'Error getting conversations' };
     }
   },
@@ -87,7 +87,7 @@ module.exports = {
         convoMap,
       };
     } catch (error) {
-      console.log(error);
+      logger.error('[getConvosQueried] Error getting conversations', error);
       return { message: 'Error fetching conversations' };
     }
   },
@@ -104,7 +104,7 @@ module.exports = {
         return convo?.title || 'New Chat';
       }
     } catch (error) {
-      console.log(error);
+      logger.error('[getConvoTitle] Error getting conversation title', error);
       return { message: 'Error getting conversation title' };
     }
   },
@@ -123,7 +123,7 @@ module.exports = {
   * const user = 'someUserId';
   * const filter = { someField: 'someValue' };
   * const result = await deleteConvos(user, filter);
-  * console.log(result); // { n: 5, ok: 1, deletedCount: 5, messages: { n: 10, ok: 1, deletedCount: 10 } }
+  * logger.error(result); // { n: 5, ok: 1, deletedCount: 5, messages: { n: 10, ok: 1, deletedCount: 10 } }
   */
  deleteConvos: async (user, filter) => {
    let toRemove = await Conversation.find({ ...filter, user }).select('conversationId');

@@ -1,5 +1,6 @@
 const { z } = require('zod');
 const Message = require('./schema/messageSchema');
+const logger = require('~/config/winston');

 const idSchema = z.string().uuid();

@@ -67,7 +68,7 @@ module.exports = {
         tokenCount,
       };
     } catch (err) {
-      console.error(`Error saving message: ${err}`);
+      logger.error('Error saving message:', err);
       throw new Error('Failed to save message.');
     }
   },
@@ -92,7 +93,7 @@ module.exports = {
         isEdited: true,
       };
     } catch (err) {
-      console.error(`Error updating message: ${err}`);
+      logger.error('Error updating message:', err);
       throw new Error('Failed to update message.');
     }
   },
@@ -106,7 +107,7 @@ module.exports = {
         });
       }
     } catch (err) {
-      console.error(`Error deleting messages: ${err}`);
+      logger.error('Error deleting messages:', err);
       throw new Error('Failed to delete messages.');
     }
   },
@@ -115,7 +116,7 @@ module.exports = {
     try {
       return await Message.find(filter).sort({ createdAt: 1 }).lean();
     } catch (err) {
-      console.error(`Error getting messages: ${err}`);
+      logger.error('Error getting messages:', err);
       throw new Error('Failed to get messages.');
     }
   },
@@ -124,7 +125,7 @@ module.exports = {
     try {
       return await Message.deleteMany(filter);
     } catch (err) {
-      console.error(`Error deleting messages: ${err}`);
+      logger.error('Error deleting messages:', err);
       throw new Error('Failed to delete messages.');
     }
   },

@@ -1,10 +1,11 @@
 const Preset = require('./schema/presetSchema');
+const { logger } = require('~/config');

 const getPreset = async (user, presetId) => {
   try {
     return await Preset.findOne({ user, presetId }).lean();
   } catch (error) {
-    console.log(error);
+    logger.error('[getPreset] Error getting single preset', error);
     return { message: 'Error getting single preset' };
   }
 };
@@ -30,7 +31,7 @@ module.exports = {

     return presets;
   } catch (error) {
-    console.log(error);
+    logger.error('[getPresets] Error getting presets', error);
     return { message: 'Error retrieving presets' };
   }
 },
@@ -62,7 +63,7 @@ module.exports = {
     setter.$set = update;
     return await Preset.findOneAndUpdate({ presetId, user }, setter, { new: true, upsert: true });
   } catch (error) {
-    console.log(error);
+    logger.error('[savePreset] Error saving preset', error);
     return { message: 'Error saving preset' };
   }
 },

@@ -1,4 +1,5 @@
 const mongoose = require('mongoose');
+const { logger } = require('~/config');

 const promptSchema = mongoose.Schema(
   {
@@ -28,7 +29,7 @@ module.exports = {
       });
       return { title, prompt };
     } catch (error) {
-      console.error(error);
+      logger.error('Error saving prompt', error);
       return { prompt: 'Error saving prompt' };
     }
   },
@@ -36,7 +37,7 @@ module.exports = {
     try {
       return await Prompt.find(filter).lean();
     } catch (error) {
-      console.error(error);
+      logger.error('Error getting prompts', error);
       return { prompt: 'Error getting prompts' };
     }
   },
@@ -44,7 +45,7 @@ module.exports = {
     try {
       return await Prompt.deleteMany(filter);
     } catch (error) {
-      console.error(error);
+      logger.error('Error deleting prompts', error);
       return { prompt: 'Error deleting prompts' };
     }
   },

@@ -1,6 +1,8 @@
-const mongoose = require('mongoose');
 const crypto = require('crypto');
-const signPayload = require('../server/services/signPayload');
+const mongoose = require('mongoose');
+const signPayload = require('~/server/services/signPayload');
+const { logger } = require('~/config');

 const { REFRESH_TOKEN_EXPIRY } = process.env ?? {};
 const expires = eval(REFRESH_TOKEN_EXPIRY) ?? 1000 * 60 * 60 * 24 * 7;
@@ -44,8 +46,8 @@ sessionSchema.methods.generateRefreshToken = async function () {

     return refreshToken;
   } catch (error) {
-    console.error(
-      'Error generating refresh token. Have you set a JWT_REFRESH_SECRET in the .env file?\n\n',
+    logger.error(
+      'Error generating refresh token. Is a `JWT_REFRESH_SECRET` set in the .env file?\n\n',
       error,
     );
     throw error;
@@ -59,10 +61,12 @@ sessionSchema.statics.deleteAllUserSessions = async function (userId) {
     }
     const result = await this.deleteMany({ user: userId });
     if (result && result?.deletedCount > 0) {
-      console.log(`Deleted ${result.deletedCount} sessions for user ${userId}.`);
+      logger.debug(
+        `[deleteAllUserSessions] Deleted ${result.deletedCount} sessions for user ${userId}.`,
+      );
     }
   } catch (error) {
-    console.log('Error in deleting user sessions:', error);
+    logger.error('[deleteAllUserSessions] Error in deleting user sessions:', error);
     throw error;
   }
 };

@@ -13,7 +13,6 @@ const { logViolation } = require('../cache');
  * @param {string} params.txData.user - The user ID or identifier.
  * @param {('prompt' | 'completion')} params.txData.tokenType - The type of token.
  * @param {number} params.txData.amount - The amount of tokens.
- * @param {boolean} params.txData.debug - Debug flag.
  * @param {string} params.txData.model - The model name or identifier.
  * @returns {Promise<boolean>} Returns true if the user can spend the amount, otherwise denies the request.
  * @throws {Error} Throws an error if there's an issue with the balance check.

@@ -1,7 +1,9 @@
+const _ = require('lodash');
 const mongoose = require('mongoose');
 const { MeiliSearch } = require('meilisearch');
-const { cleanUpPrimaryKeyValue } = require('../../lib/utils/misc');
-const _ = require('lodash');
+const { cleanUpPrimaryKeyValue } = require('~/lib/utils/misc');
+const logger = require('~/config/meiliLogger');

 const searchEnabled = process.env.SEARCH && process.env.SEARCH.toLowerCase() === 'true';
 const meiliEnabled = process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY && searchEnabled;
@@ -64,8 +66,7 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
         offset += batchSize;
       }

-      console.log('indexMap', indexMap.size);
-      console.log('mongoMap', mongoMap.size);
+      logger.debug('[syncWithMeili]', { indexMap: indexMap.size, mongoMap: mongoMap.size });

       const updateOps = [];

@@ -80,7 +81,11 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
           (doc.text && doc.text !== mongoMap.get(id).text) ||
           (doc.title && doc.title !== mongoMap.get(id).title)
         ) {
-          console.log(`${id} had document discrepancy in ${doc.text ? 'text' : 'title'} field`);
+          logger.debug(
+            `[syncWithMeili] ${id} had document discrepancy in ${
+              doc.text ? 'text' : 'title'
+            } field`,
+          );
           updateOps.push({
             updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
           });
@@ -116,15 +121,14 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {

       if (updateOps.length > 0) {
         await this.collection.bulkWrite(updateOps);
-        console.log(
-          `[Meilisearch] Finished indexing ${
+        logger.debug(
+          `[syncWithMeili] Finished indexing ${
             primaryKey === 'messageId' ? 'messages' : 'conversations'
           }`,
         );
       }
     } catch (error) {
-      console.log('[Meilisearch] Error adding document to Meili');
-      console.error(error);
+      logger.error('[syncWithMeili] Error adding document to Meili', error);
     }
   }

@@ -143,7 +147,7 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
       const query = {};
       // query[primaryKey] = { $in: _.map(data.hits, primaryKey) };
       query[primaryKey] = _.map(data.hits, (hit) => cleanUpPrimaryKeyValue(hit[primaryKey]));
-      // console.log('query', query);
+      // logger.debug('query', query);
       const hitsFromMongoose = await this.find(
         query,
         _.reduce(
@@ -186,11 +190,11 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
     async addObjectToMeili() {
       const object = this.preprocessObjectForIndex();
       try {
-        // console.log('Adding document to Meili', object);
+        // logger.debug('Adding document to Meili', object);
         await index.addDocuments([object]);
       } catch (error) {
-        // console.log('Error adding document to Meili');
-        // console.error(error);
+        // logger.debug('Error adding document to Meili');
+        // logger.error(error);
       }

       await this.collection.updateMany({ _id: this._id }, { $set: { _meiliIndex: true } });
@@ -311,10 +315,10 @@ module.exports = function mongoMeili(schema, options) {
       return next();
     } catch (error) {
       if (meiliEnabled) {
-        console.log(
-          '[Meilisearch] There was an issue deleting conversation indexes upon deletion, next startup may be slow due to syncing',
+        logger.error(
+          '[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion, next startup may be slow due to syncing',
+          error,
         );
-        console.error(error);
       }
       return next();
     }
@@ -335,7 +339,11 @@ module.exports = function mongoMeili(schema, options) {
       try {
         meiliDoc = await client.index('convos').getDocument(doc.conversationId);
       } catch (error) {
-        console.log('[Meilisearch] Convo not found and will index', doc.conversationId);
+        logger.error(
+          '[MeiliMongooseModel.findOneAndUpdate] Convo not found in MeiliSearch and will index ' +
+            doc.conversationId,
+          error,
+        );
       }
     }

@@ -1,5 +1,5 @@
 const mongoose = require('mongoose');
-const mongoMeili = require('../plugins/mongoMeili');
+const mongoMeili = require('~/models/plugins/mongoMeili');
 const messageSchema = mongoose.Schema(
   {
     messageId: {

@@ -1,4 +1,5 @@
 const Transaction = require('./Transaction');
+const { logger } = require('~/config');

 /**
  * Creates up to two transactions to record the spending of tokens.
@@ -30,7 +31,7 @@ const spendTokens = async (txData, tokenUsage) => {
     }

     if (!completionTokens) {
-      this.debug && console.dir({ prompt, completion }, { depth: null });
+      logger.debug('[spendTokens] !completionTokens', { prompt, completion });
       return;
     }

@@ -40,9 +41,9 @@ const spendTokens = async (txData, tokenUsage) => {
       rawAmount: -completionTokens,
     });

-    this.debug && console.dir({ prompt, completion }, { depth: null });
+    logger.debug('[spendTokens] post-transaction', { prompt, completion });
   } catch (err) {
-    console.error(err);
+    logger.error('[spendTokens]', err);
   }
 };

@@ -51,6 +51,7 @@
     "jsonwebtoken": "^9.0.0",
     "keyv": "^4.5.4",
     "keyv-file": "^0.2.0",
+    "klona": "^2.0.6",
     "langchain": "^0.0.186",
     "librechat-data-provider": "*",
     "lodash": "^4.17.21",
@@ -74,8 +75,10 @@
     "pino": "^8.12.1",
     "sharp": "^0.32.6",
     "tiktoken": "^1.0.10",
+    "traverse": "^0.6.7",
     "ua-parser-js": "^1.0.36",
-    "winston": "^3.10.0",
+    "winston": "^3.11.0",
+    "winston-daily-rotate-file": "^4.7.1",
     "zod": "^3.22.4"
   },
   "devDependencies": {

@@ -1,7 +1,8 @@
+const { getResponseSender } = require('librechat-data-provider');
 const { sendMessage, createOnProgress } = require('~/server/utils');
 const { saveMessage, getConvoTitle, getConvo } = require('~/models');
-const { getResponseSender } = require('librechat-data-provider');
 const { createAbortController, handleAbortError } = require('~/server/middleware');
+const { logger } = require('~/config');

 const AskController = async (req, res, next, initializeClient) => {
   let {
@@ -11,8 +12,7 @@ const AskController = async (req, res, next, initializeClient) => {
     parentMessageId = null,
     overrideParentMessageId = null,
   } = req.body;
-  console.log('ask log');
-  console.dir({ text, conversationId, endpointOption }, { depth: null });
+  logger.debug('[AskController]', { text, conversationId, ...endpointOption });
   let metadata;
   let userMessage;
   let promptTokens;

@@ -1,13 +1,14 @@
 const crypto = require('crypto');
 const cookies = require('cookie');
 const jwt = require('jsonwebtoken');
-const { Session, User } = require('../../models');
+const { Session, User } = require('~/models');
 const {
   registerUser,
-  requestPasswordReset,
   resetPassword,
   setAuthTokens,
-} = require('../services/AuthService');
+  requestPasswordReset,
+} = require('~/server/services/AuthService');
+const { logger } = require('~/config');

 const registrationController = async (req, res) => {
   try {
@@ -27,7 +28,7 @@ const registrationController = async (req, res) => {
       res.status(status).send({ message });
     }
   } catch (err) {
-    console.log(err);
+    logger.error('[registrationController]', err);
     return res.status(500).json({ message: err.message });
   }
 };
@@ -45,7 +46,7 @@ const resetPasswordRequestController = async (req, res) => {
       return res.status(200).json(resetService);
     }
   } catch (e) {
-    console.log(e);
+    logger.error('[resetPasswordRequestController]', e);
     return res.status(400).json({ message: e.message });
   }
 };
@@ -63,7 +64,7 @@ const resetPasswordController = async (req, res) => {
       return res.status(200).json(resetPasswordService);
     }
   } catch (e) {
-    console.log(e);
+    logger.error('[resetPasswordController]', e);
    return res.status(400).json({ message: e.message });
   }
 };
@@ -108,8 +109,7 @@ const refreshController = async (req, res) => {
       res.status(401).send('Refresh token expired or not found for this user');
     }
   } catch (err) {
-    console.error('Refresh token error', refreshToken);
-    console.error(err);
+    logger.error(`[refreshController] Refresh token: ${refreshToken}`, err);
     res.status(403).send('Invalid refresh token');
   }
 };

@@ -1,7 +1,8 @@
+const { getResponseSender } = require('librechat-data-provider');
 const { sendMessage, createOnProgress } = require('~/server/utils');
 const { saveMessage, getConvoTitle, getConvo } = require('~/models');
-const { getResponseSender } = require('librechat-data-provider');
 const { createAbortController, handleAbortError } = require('~/server/middleware');
+const { logger } = require('~/config');

 const EditController = async (req, res, next, initializeClient) => {
   let {
@@ -14,8 +15,13 @@ const EditController = async (req, res, next, initializeClient) => {
     parentMessageId = null,
     overrideParentMessageId = null,
   } = req.body;
-  console.log('edit log');
-  console.dir({ text, generation, isContinued, conversationId, endpointOption }, { depth: null });
+  logger.debug('[EditController]', {
+    text,
+    generation,
+    isContinued,
+    conversationId,
+    ...endpointOption,
+  });
   let metadata;
   let userMessage;
   let promptTokens;

@@ -1,15 +1,17 @@
+const { logger } = require('~/config');
+
 //handle duplicates
 const handleDuplicateKeyError = (err, res) => {
+logger.error('Duplicate key error:', err.keyValue);
 const field = Object.keys(err.keyValue);
 const code = 409;
 const error = `An document with that ${field} already exists.`;
-console.log('congrats you hit the duped keys error');
 res.status(code).send({ messages: error, fields: field });
 };

 //handle validation errors
 const handleValidationError = (err, res) => {
-console.log('congrats you hit the validation middleware');
+logger.error('Validation error:', err.errors);
 let errors = Object.values(err.errors).map((el) => el.message);
 let fields = Object.values(err.errors).map((el) => el.path);
 let code = 400;

@@ -24,7 +26,6 @@ const handleValidationError = (err, res) => {
 // eslint-disable-next-line no-unused-vars
 module.exports = (err, req, res, next) => {
 try {
-console.log('congrats you hit the error middleware');
 if (err.name === 'ValidationError') {
 return (err = handleValidationError(err, res));
 }

@@ -32,6 +33,7 @@ module.exports = (err, req, res, next) => {
 return (err = handleDuplicateKeyError(err, res));
 }
 } catch (err) {
+logger.error('ErrorController => error', err);
 res.status(500).send('An unknown error occurred.');
 }
 };
@@ -1,6 +1,8 @@
-const { promises: fs } = require('fs');
 const path = require('path');
-const { addOpenAPISpecs } = require('../../app/clients/tools/util/addOpenAPISpecs');
+const { promises: fs } = require('fs');
+const { addOpenAPISpecs } = require('~/app/clients/tools/util/addOpenAPISpecs');
+const { CacheKeys } = require('~/common/enums');
+const { getLogStores } = require('~/cache');

 const filterUniquePlugins = (plugins) => {
 const seen = new Set();

@@ -27,6 +29,13 @@ const isPluginAuthenticated = (plugin) => {

 const getAvailablePluginsController = async (req, res) => {
 try {
+const cache = getLogStores(CacheKeys.CONFIG);
+const cachedPlugins = await cache.get(CacheKeys.PLUGINS);
+if (cachedPlugins) {
+res.status(200).json(cachedPlugins);
+return;
+}
+
 const manifestFile = await fs.readFile(
 path.join(__dirname, '..', '..', 'app', 'clients', 'tools', 'manifest.json'),
 'utf8',

@@ -42,6 +51,7 @@ const getAvailablePluginsController = async (req, res) => {
 }
 });
 const plugins = await addOpenAPISpecs(authenticatedPlugins);
+await cache.set(CacheKeys.PLUGINS, plugins);
 res.status(200).json(plugins);
 } catch (error) {
 res.status(500).json({ message: error.message });
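The controller above adds a cache-aside step for the plugin list: look up getLogStores(CacheKeys.CONFIG) for a previously computed result, return it if present, otherwise read the manifest, resolve the OpenAPI specs once, and store the outcome. A rough sketch of that flow is below; the helper itself is hypothetical, only the store and key names mirror the hunk.

// Hypothetical cache-aside helper illustrating the flow used above.
async function getOrComputePlugins(cache, key, computePlugins) {
  const cached = await cache.get(key);
  if (cached) {
    return cached; // the plugin list is static, so a warm cache skips the manifest read
  }
  const plugins = await computePlugins(); // e.g. read manifest.json, then addOpenAPISpecs
  await cache.set(key, plugins);
  return plugins;
}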
@@ -1,5 +1,6 @@
-const { updateUserPluginsService } = require('../services/UserService');
+const { updateUserPluginsService } = require('~/server/services/UserService');
-const { updateUserPluginAuth, deleteUserPluginAuth } = require('../services/PluginService');
+const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/services/PluginService');
+const { logger } = require('~/config');

 const getUserController = async (req, res) => {
 res.status(200).send(req.user);

@@ -13,7 +14,7 @@ const updateUserPluginsController = async (req, res) => {
 const userPluginsService = await updateUserPluginsService(user, pluginKey, action);

 if (userPluginsService instanceof Error) {
-console.log(userPluginsService);
+logger.error('[userPluginsService]', userPluginsService);
 const { status, message } = userPluginsService;
 res.status(status).send({ message });
 }

@@ -24,7 +25,7 @@ const updateUserPluginsController = async (req, res) => {
 for (let i = 0; i < keys.length; i++) {
 authService = await updateUserPluginAuth(user.id, keys[i], pluginKey, values[i]);
 if (authService instanceof Error) {
-console.log(authService);
+logger.error('[authService]', authService);
 const { status, message } = authService;
 res.status(status).send({ message });
 }

@@ -34,7 +35,7 @@ const updateUserPluginsController = async (req, res) => {
 for (let i = 0; i < keys.length; i++) {
 authService = await deleteUserPluginAuth(user.id, keys[i]);
 if (authService instanceof Error) {
-console.log(authService);
+logger.error('[authService]', authService);
 const { status, message } = authService;
 res.status(status).send({ message });
 }

@@ -44,7 +45,7 @@ const updateUserPluginsController = async (req, res) => {

 res.status(200).send();
 } catch (err) {
-console.log(err);
+logger.error('[updateUserPluginsController]', err);
 res.status(500).json({ message: err.message });
 }
 };

@@ -1,5 +1,6 @@
-const User = require('../../../models/User');
+const User = require('~/models/User');
-const { setAuthTokens } = require('../../services/AuthService');
+const { setAuthTokens } = require('~/server/services/AuthService');
+const { logger } = require('~/config');

 const loginController = async (req, res) => {
 try {

@@ -15,7 +16,7 @@ const loginController = async (req, res) => {

 return res.status(200).send({ token, user });
 } catch (err) {
-console.log(err);
+logger.error('[loginController]', err);
 }

 // Generic error messages are safer

@@ -1,5 +1,6 @@
-const { logoutUser } = require('../../services/AuthService');
 const cookies = require('cookie');
+const { logoutUser } = require('~/server/services/AuthService');
+const { logger } = require('~/config');

 const logoutController = async (req, res) => {
 const refreshToken = req.headers.cookie ? cookies.parse(req.headers.cookie).refreshToken : null;

@@ -9,7 +10,7 @@ const logoutController = async (req, res) => {
 res.clearCookie('refreshToken');
 return res.status(status).send({ message });
 } catch (err) {
-console.log(err);
+logger.error('[logoutController]', err);
 return res.status(500).json({ message: err.message });
 }
 };
@@ -6,8 +6,10 @@ const passport = require('passport');
 const mongoSanitize = require('express-mongo-sanitize');
 const errorController = require('./controllers/ErrorController');
 const configureSocialLogins = require('./socialLogins');
-const { connectDb, indexSync } = require('../lib/db');
+const { connectDb, indexSync } = require('~/lib/db');
-const config = require('../config');
+const { logger } = require('~/config');

+const paths = require('~/config/paths');
 const routes = require('./routes');

 const { PORT, HOST, ALLOW_SOCIAL_LOGIN } = process.env ?? {};

@@ -15,15 +17,15 @@ const { PORT, HOST, ALLOW_SOCIAL_LOGIN } = process.env ?? {};
 const port = Number(PORT) || 3080;
 const host = HOST || 'localhost';
 const projectPath = path.join(__dirname, '..', '..', 'client');
-const { jwtLogin, passportLogin } = require('../strategies');
+const { jwtLogin, passportLogin } = require('~/strategies');

 const startServer = async () => {
 await connectDb();
-console.log('Connected to MongoDB');
+logger.info('Connected to MongoDB');
 await indexSync();

 const app = express();
-app.locals.config = config;
+app.locals.config = paths;

 // Middleware
 app.use(errorController);

@@ -77,11 +79,11 @@ const startServer = async () => {

 app.listen(port, host, () => {
 if (host == '0.0.0.0') {
-console.log(
+logger.info(
 `Server listening on all interfaces at port ${port}. Use http://localhost:${port} to access it`,
 );
 } else {
-console.log(`Server listening at http://${host == '0.0.0.0' ? 'localhost' : host}:${port}`);
+logger.info(`Server listening at http://${host == '0.0.0.0' ? 'localhost' : host}:${port}`);
 }
 });
 };

@@ -91,13 +93,12 @@ startServer();
 let messageCount = 0;
 process.on('uncaughtException', (err) => {
 if (!err.message.includes('fetch failed')) {
-console.error('There was an uncaught error:');
-console.error(err);
+logger.error('There was an uncaught error:', err);
 }

 if (err.message.includes('fetch failed')) {
 if (messageCount === 0) {
-console.error('Meilisearch error, search will be disabled');
+logger.warn('Meilisearch error, search will be disabled');
 messageCount++;
 }

@@ -105,7 +106,7 @@ process.on('uncaughtException', (err) => {
 }

 if (err.message.includes('OpenAIError') || err.message.includes('ChatCompletionMessage')) {
-console.error(
+logger.error(
 '\n\nAn Uncaught `OpenAIError` error may be due to your reverse-proxy setup or stream configuration, or a bug in the `openai` node package.',
 );
 return;
@@ -3,6 +3,7 @@ const { saveMessage, getConvo, getConvoTitle } = require('~/models');
 const clearPendingReq = require('~/cache/clearPendingReq');
 const abortControllers = require('./abortControllers');
 const spendTokens = require('~/models/spendTokens');
+const { logger } = require('~/config');

 async function abortMessage(req, res) {
 const { abortKey } = req.body;

@@ -13,7 +14,7 @@ async function abortMessage(req, res) {

 const { abortController } = abortControllers.get(abortKey);
 const ret = await abortController.abortCompletion();
-console.log('Aborted request', abortKey);
+logger.debug('[abortMessage] Aborted request', { abortKey });
 abortControllers.delete(abortKey);
 res.send(JSON.stringify(ret));
 }

@@ -26,7 +27,7 @@ const handleAbort = () => {
 }
 return await abortMessage(req, res);
 } catch (err) {
-console.error(err);
+logger.error('[abortMessage] handleAbort error', err);
 }
 };
 };

@@ -82,7 +83,7 @@ const createAbortController = (req, res, getAbortData) => {
 };

 const handleAbortError = async (res, req, error, data) => {
-console.error(error);
+logger.error('[handleAbortError] response error and aborting request', error);
 const { sender, conversationId, messageId, parentMessageId, partialText } = data;

 const respondWithError = async () => {

@@ -110,7 +111,7 @@ const handleAbortError = async (res, req, error, data) => {
 try {
 return await abortMessage(req, res);
 } catch (err) {
-console.error(err);
+logger.error('[handleAbortError] error while trying to abort message', err);
 return respondWithError();
 }
 } else {

@@ -1,5 +1,6 @@
 const Keyv = require('keyv');
 const { KeyvFile } = require('keyv-file');
+const { logger } = require('~/config');

 const addToCache = async ({ endpoint, endpointOption, userMessage, responseMessage }) => {
 try {

@@ -57,7 +58,7 @@ const addToCache = async ({ endpoint, endpointOption, userMessage, responseMessa

 await conversationsCache.set(conversationId, conversation);
 } catch (error) {
-console.error('Trouble adding to cache', error);
+logger.error('[addToCache] Error adding conversation to cache', error);
 }
 };
@@ -1,10 +1,12 @@
-const express = require('express');
 const crypto = require('crypto');
+const express = require('express');
+const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('~/models');
+const { handleError, sendMessage, createOnProgress, handleText } = require('~/server/utils');
+const { setHeaders } = require('~/server/middleware');
+const { browserClient } = require('~/app/');
+const { logger } = require('~/config');

 const router = express.Router();
-const { browserClient } = require('../../../app/');
-const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models');
-const { handleError, sendMessage, createOnProgress, handleText } = require('../../utils');
-const { setHeaders } = require('../../middleware');

 router.post('/', setHeaders, async (req, res) => {
 const {

@@ -41,10 +43,10 @@ router.post('/', setHeaders, async (req, res) => {
 key: req.body?.key ?? null,
 };

-console.log('ask log', {
+logger.debug('[/ask/chatGPTBrowser]', {
 userMessage,
-endpointOption,
 conversationId,
+...endpointOption,
 });

 if (!overrideParentMessageId) {

@@ -136,7 +138,7 @@ const ask = async ({
 },
 });

-console.log('CLIENT RESPONSE', response);
+logger.debug('[/ask/chatGPTBrowser]', response);

 const newConversationId = response.conversationId || conversationId;
 const newUserMassageId = response.parentMessageId || userMessageId;

@@ -1,10 +1,12 @@
 const express = require('express');
 const crypto = require('crypto');
+const { handleError, sendMessage, createOnProgress, handleText } = require('~/server/utils');
+const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('~/models');
+const { setHeaders } = require('~/server/middleware');
+const { titleConvoBing, askBing } = require('~/app');
+const { logger } = require('~/config');

 const router = express.Router();
-const { titleConvoBing, askBing } = require('../../../app');
-const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models');
-const { handleError, sendMessage, createOnProgress, handleText } = require('../../utils');
-const { setHeaders } = require('../../middleware');

 router.post('/', setHeaders, async (req, res) => {
 const {

@@ -60,7 +62,7 @@ router.post('/', setHeaders, async (req, res) => {
 };
 }

-console.log('ask log', {
+logger.debug('[/ask/bingAI] ask log', {
 userMessage,
 endpointOption,
 conversationId,

@@ -153,10 +155,10 @@ const ask = async ({
 abortController,
 });

-console.log('BING RESPONSE', response);
+logger.debug('[/ask/bingAI] BING RESPONSE', response);

 if (response.details && response.details.scores) {
-console.log('SCORES', response.details.scores);
+logger.debug('[/ask/bingAI] SCORES', response.details.scores);
 }

 const newConversationId = endpointOption?.jailbreak

@@ -250,7 +252,7 @@ const ask = async ({
 });
 }
 } catch (error) {
-console.error(error);
+logger.error('[/ask/bingAI] Error handling BingAI response', error);
 const partialText = getPartialText();
 if (partialText?.length > 2) {
 const responseMessage = {

@@ -276,7 +278,7 @@ const ask = async ({
 responseMessage: responseMessage,
 };
 } else {
-console.log(error);
+logger.error('[/ask/bingAI] Error handling BingAI response', error);
 const errorMessage = {
 messageId: responseMessageId,
 sender: model,
@@ -14,6 +14,7 @@ const {
 validateEndpoint,
 buildEndpointOption,
 } = require('~/server/middleware');
+const { logger } = require('~/config');

 router.post('/abort', handleAbort());

@@ -25,8 +26,7 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
 parentMessageId = null,
 overrideParentMessageId = null,
 } = req.body;
-console.log('ask log');
-console.dir({ text, conversationId, endpointOption }, { depth: null });
+logger.debug('[/ask/gptPlugins]', { text, conversationId, ...endpointOption });
 let metadata;
 let userMessage;
 let promptTokens;

@@ -189,8 +189,8 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
 response = { ...response, ...metadata };
 }

-console.log('CLIENT RESPONSE');
-console.dir(response, { depth: null });
+logger.debug('[/ask/gptPlugins]', response);
 response.plugins = plugins.map((p) => ({ ...p, loading: false }));
 await saveMessage({ ...response, user });

@@ -12,6 +12,7 @@ const {
 validateEndpoint,
 buildEndpointOption,
 } = require('~/server/middleware');
+const { logger } = require('~/config');

 router.post('/abort', handleAbort());

@@ -23,8 +24,9 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
 parentMessageId = null,
 overrideParentMessageId = null,
 } = req.body;
-console.log('ask log');
-console.dir({ text, conversationId, endpointOption }, { depth: null });
+logger.debug('[/ask/openAI]', { text, conversationId, ...endpointOption });

 let metadata;
 let userMessage;
 let promptTokens;
@@ -1,5 +1,7 @@
 const OpenAI = require('openai');
 const express = require('express');
+const { logger } = require('~/config');

 const router = express.Router();

 /**

@@ -13,7 +15,7 @@ router.post('/', async (req, res) => {
 const openai = new OpenAI(process.env.OPENAI_API_KEY);
 const assistantData = req.body;
 const assistant = await openai.beta.assistants.create(assistantData);
-console.log(assistant);
+logger.debug('/assistants/', assistant);
 res.status(201).json(assistant);
 } catch (error) {
 res.status(500).json({ error: error.message });

@@ -1,5 +1,6 @@
 const crypto = require('crypto');
 const OpenAI = require('openai');
+const { logger } = require('~/config');
 const { sendMessage } = require('../../utils');
 const { initThread, createRun, handleRun } = require('../../services/AssistantService');
 const express = require('express');

@@ -23,7 +24,7 @@ const {
 */
 router.post('/', setHeaders, async (req, res) => {
 try {
-console.log(req.body);
+logger.debug('[/assistants/chat/] req.body', req.body);
 // test message:
 // How many polls of 500 ms intervals are there in 18 seconds?

@@ -100,7 +101,7 @@ router.post('/', setHeaders, async (req, res) => {
 res.end();
 } catch (error) {
 // res.status(500).json({ error: error.message });
-console.error(error);
+logger.error('[/assistants/chat/]', error);
 res.end();
 }
 });
@@ -1,6 +1,8 @@
 const express = require('express');
+const { isEnabled } = require('~/server/utils');
+const { logger } = require('~/config');

 const router = express.Router();
-const { isEnabled } = require('../utils');
 const emailLoginEnabled =
 process.env.ALLOW_EMAIL_LOGIN === undefined || isEnabled(process.env.ALLOW_EMAIL_LOGIN);

@@ -38,7 +40,7 @@ router.get('/', async function (req, res) {

 return res.status(200).send(payload);
 } catch (err) {
-console.error(err);
+logger.error('Error in startup config', err);
 return res.status(500).send({ error: err.message });
 }
 });

@@ -1,8 +1,9 @@
 const express = require('express');
 const router = express.Router();
-const { getConvo, saveConvo } = require('../../models');
+const { getConvosByPage, deleteConvos } = require('~/models/Conversation');
-const { getConvosByPage, deleteConvos } = require('../../models/Conversation');
+const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
-const requireJwtAuth = require('../middleware/requireJwtAuth');
+const { getConvo, saveConvo } = require('~/models');
+const { logger } = require('~/config');

 router.use(requireJwtAuth);

@@ -30,7 +31,7 @@ router.post('/clear', async (req, res) => {
 }

 // for debugging deletion source
-// console.log('source:', source);
+// logger.debug('source:', source);

 if (source === 'button' && !conversationId) {
 return res.status(200).send('No conversationId provided');

@@ -40,7 +41,7 @@ router.post('/clear', async (req, res) => {
 const dbResponse = await deleteConvos(req.user.id, filter);
 res.status(201).send(dbResponse);
 } catch (error) {
-console.error(error);
+logger.error('Error clearing conversations', error);
 res.status(500).send(error);
 }
 });

@@ -52,7 +53,7 @@ router.post('/update', async (req, res) => {
 const dbResponse = await saveConvo(req.user.id, update);
 res.status(201).send(dbResponse);
 } catch (error) {
-console.error(error);
+logger.error('Error updating conversation', error);
 res.status(500).send(error);
 }
 });
@@ -1,8 +1,8 @@
 const express = require('express');
 const router = express.Router();
 const { validateTools } = require('~/app');
-const { saveMessage, getConvoTitle, getConvo } = require('~/models');
 const { getResponseSender } = require('librechat-data-provider');
+const { saveMessage, getConvoTitle, getConvo } = require('~/models');
 const { initializeClient } = require('~/server/services/Endpoints/gptPlugins');
 const { sendMessage, createOnProgress, formatSteps, formatAction } = require('~/server/utils');
 const {

@@ -13,6 +13,7 @@ const {
 validateEndpoint,
 buildEndpointOption,
 } = require('~/server/middleware');
+const { logger } = require('~/config');

 router.post('/abort', handleAbort());

@@ -27,8 +28,14 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
 parentMessageId = null,
 overrideParentMessageId = null,
 } = req.body;
-console.log('edit log');
-console.dir({ text, generation, isContinued, conversationId, endpointOption }, { depth: null });
+logger.debug('[/edit/gptPlugins]', {
+text,
+generation,
+isContinued,
+conversationId,
+...endpointOption,
+});
 let metadata;
 let userMessage;
 let promptTokens;

@@ -102,7 +109,7 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
 saveMessage({ ...userMessage, user });
 }
 sendIntermediateMessage(res, { plugin });
-// console.log('PLUGIN ACTION', formattedAction);
+// logger.debug('PLUGIN ACTION', formattedAction);
 };

 const onChainEnd = (data) => {

@@ -111,7 +118,7 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
 plugin.loading = false;
 saveMessage({ ...userMessage, user });
 sendIntermediateMessage(res, { plugin });
-// console.log('CHAIN END', plugin.outputs);
+// logger.debug('CHAIN END', plugin.outputs);
 };

 const getAbortData = () => ({

@@ -162,8 +169,7 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
 response = { ...response, ...metadata };
 }

-console.log('CLIENT RESPONSE');
-console.dir(response, { depth: null });
+logger.debug('[/edit/gptPlugins] CLIENT RESPONSE', response);
 response.plugin = { ...plugin, loading: false };
 await saveMessage({ ...response, user });

@@ -12,6 +12,7 @@ const {
 validateEndpoint,
 buildEndpointOption,
 } = require('~/server/middleware');
+const { logger } = require('~/config');

 router.post('/abort', handleAbort());

@@ -26,8 +27,15 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
 parentMessageId = null,
 overrideParentMessageId = null,
 } = req.body;
-console.log('edit log');
-console.dir({ text, generation, isContinued, conversationId, endpointOption }, { depth: null });
+logger.debug('[/edit/openAI]', {
+text,
+generation,
+isContinued,
+conversationId,
+...endpointOption,
+});

 let metadata;
 let userMessage;
 let promptTokens;
@@ -1,8 +1,9 @@
 const { z } = require('zod');
+const path = require('path');
 const fs = require('fs').promises;
 const express = require('express');
 const { deleteFiles } = require('~/models');
-const path = require('path');
+const { logger } = require('~/config');

 const router = express.Router();

@@ -55,7 +56,7 @@ router.delete('/', async (req, res) => {
 await Promise.all(promises);
 res.status(200).json({ message: 'Files deleted successfully' });
 } catch (error) {
-console.error('Error deleting files:', error);
+logger.error('[/files] Error deleting files:', error);
 res.status(400).json({ message: 'Error in request', error: error.message });
 }
 });

@@ -3,6 +3,7 @@ const fs = require('fs').promises;
 const express = require('express');
 const upload = require('./multer');
 const { localStrategy } = require('~/server/services/Files');
+const { logger } = require('~/config');

 const router = express.Router();

@@ -35,11 +36,11 @@ router.post('/', upload.single('file'), async (req, res) => {
 metadata.file_id = req.file_id;
 await localStrategy({ req, res, file, metadata });
 } catch (error) {
-console.error('Error processing file:', error);
+logger.error('[/files/images] Error processing file:', error);
 try {
 await fs.unlink(file.path);
 } catch (error) {
-console.error('Error deleting file:', error);
+logger.error('[/files/images] Error deleting file:', error);
 }
 res.status(500).json({ message: 'Error processing file' });
 }

@@ -49,7 +50,7 @@ router.post('/', upload.single('file'), async (req, res) => {
 // try {
 // // await fs.unlink(file.path);
 // } catch (error) {
-// console.error('Error deleting file:', error);
+// logger.error('[/files/images] Error deleting file:', error);

 // }
 // }
@@ -1,8 +1,10 @@
 const passport = require('passport');
 const express = require('express');
 const router = express.Router();
-const { setAuthTokens } = require('../services/AuthService');
+const { setAuthTokens } = require('~/server/services/AuthService');
-const { loginLimiter, checkBan } = require('../middleware');
+const { loginLimiter, checkBan } = require('~/server/middleware');
+const { logger } = require('~/config');

 const domains = {
 client: process.env.DOMAIN_CLIENT,
 server: process.env.DOMAIN_SERVER,

@@ -19,7 +21,7 @@ const oauthHandler = async (req, res) => {
 await setAuthTokens(req.user._id, res);
 res.redirect(domains.client);
 } catch (err) {
-console.error('Error in setting authentication tokens:', err);
+logger.error('Error in setting authentication tokens:', err);
 }
 };

@@ -1,8 +1,10 @@
 const express = require('express');
-const router = express.Router();
-const { getPresets, savePreset, deletePresets } = require('../../models');
 const crypto = require('crypto');
-const requireJwtAuth = require('../middleware/requireJwtAuth');
+const { getPresets, savePreset, deletePresets } = require('~/models');
+const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
+const { logger } = require('~/config');

+const router = express.Router();

 router.get('/', requireJwtAuth, async (req, res) => {
 const presets = (await getPresets(req.user.id)).map((preset) => preset);

@@ -18,7 +20,7 @@ router.post('/', requireJwtAuth, async (req, res) => {
 const preset = await savePreset(req.user.id, update);
 res.status(201).send(preset);
 } catch (error) {
-console.error(error);
+logger.error('[/presets] error saving preset', error);
 res.status(500).send(error);
 }
 });

@@ -31,13 +33,13 @@ router.post('/delete', requireJwtAuth, async (req, res) => {
 filter = { presetId };
 }

-console.log('delete preset filter', filter);
+logger.debug('[/presets/delete] delete preset filter', filter);

 try {
 const deleteCount = await deletePresets(req.user.id, filter);
 res.status(201).send(deleteCount);
 } catch (error) {
-console.error(error);
+logger.error('[/presets/delete] error deleting presets', error);
 res.status(500).send(error);
 }
 });
@@ -1,14 +1,16 @@
 const Keyv = require('keyv');
 const express = require('express');
-const router = express.Router();
 const { MeiliSearch } = require('meilisearch');
-const { Message } = require('../../models/Message');
+const { Conversation, getConvosQueried } = require('~/models/Conversation');
-const { Conversation, getConvosQueried } = require('../../models/Conversation');
+const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
-const { reduceHits } = require('../../lib/utils/reduceHits');
+const { cleanUpPrimaryKeyValue } = require('~/lib/utils/misc');
-const { cleanUpPrimaryKeyValue } = require('../../lib/utils/misc');
+const { reduceHits } = require('~/lib/utils/reduceHits');
-const requireJwtAuth = require('../middleware/requireJwtAuth');
+const { isEnabled } = require('~/server/utils');
-const keyvRedis = require('../../cache/keyvRedis');
+const { Message } = require('~/models/Message');
-const { isEnabled } = require('../utils');
+const keyvRedis = require('~/cache/keyvRedis');
+const { logger } = require('~/config');

+const router = express.Router();

 const expiration = 60 * 1000;
 const cache = isEnabled(process.env.USE_REDIS)

@@ -31,7 +33,7 @@ router.get('/', async function (req, res) {
 const key = `${user}:search:${q}`;
 const cached = await cache.get(key);
 if (cached) {
-console.log('cache hit', key);
+logger.debug('[/search] cache hit: ' + key);
 const { pages, pageSize, messages } = cached;
 res
 .status(200)

@@ -39,7 +41,6 @@ router.get('/', async function (req, res) {
 return;
 }

-// const message = await Message.meiliSearch(q);
 const messages = (
 await Message.meiliSearch(
 q,

@@ -61,8 +62,8 @@ router.get('/', async function (req, res) {
 const titles = (await Conversation.meiliSearch(q)).hits;
 const sortedHits = reduceHits(messages, titles);
 // debugging:
-// console.log('user:', user, 'message hits:', messages.length, 'convo hits:', titles.length);
+// logger.debug('user:', user, 'message hits:', messages.length, 'convo hits:', titles.length);
-// console.log('sorted hits:', sortedHits.length);
+// logger.debug('sorted hits:', sortedHits.length);
 const result = await getConvosQueried(user, sortedHits, pageNumber);

 const activeMessages = [];

@@ -86,10 +87,10 @@ router.get('/', async function (req, res) {
 }
 delete result.convoMap;
 // for debugging
-// console.log(result, messages.length);
+// logger.debug(result, messages.length);
 res.status(200).send(result);
 } catch (error) {
-console.log(error);
+logger.error('[/search] Error while searching messages & conversations', error);
 res.status(500).send({ message: 'Error searching' });
 }
 });

@@ -114,11 +115,9 @@ router.get('/enable', async function (req, res) {
 });

 const { status } = await client.health();
-// console.log(`Meilisearch: ${status}`);
 result = status === 'available' && !!process.env.SEARCH;
 return res.send(result);
 } catch (error) {
-// console.error(error);
 return res.send(false);
 }
 });
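The search route keeps its short-lived result cache: a Keyv store backed by Redis when USE_REDIS is enabled, keyed by `${user}:search:${q}` with a 60-second expiration. The cache construction itself is truncated in the hunk above, so the following is only a plausible sketch under those assumptions, not the actual code.

// Assumed shape of the conditional search cache; keyvRedis stands in for the
// preconfigured Redis store exported from ~/cache/keyvRedis.
const Keyv = require('keyv');

const expiration = 60 * 1000; // cached search results are kept for one minute

const buildSearchCache = (useRedis, keyvRedis) =>
  useRedis
    ? new Keyv({ store: keyvRedis, namespace: 'search', ttl: expiration })
    : new Keyv({ namespace: 'search', ttl: expiration });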
@@ -1,7 +1,8 @@
 const express = require('express');
 const router = express.Router();
-const { countTokens } = require('../utils');
+const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
-const requireJwtAuth = require('../middleware/requireJwtAuth');
+const { countTokens } = require('~/server/utils');
+const { logger } = require('~/config');

 router.post('/', requireJwtAuth, async (req, res) => {
 try {

@@ -9,7 +10,7 @@ router.post('/', requireJwtAuth, async (req, res) => {
 const count = await countTokens(arg?.text ?? arg);
 res.send({ count });
 } catch (e) {
-console.error(e);
+logger.error('[/tokenizer] Error counting tokens', e);
 res.status(500).send(e.message);
 }
 });

@@ -1,10 +1,12 @@
 const crypto = require('crypto');
 const bcrypt = require('bcryptjs');
-const User = require('../../models/User');
+const { registerSchema, errorsToString } = require('~/strategies/validators');
-const Session = require('../../models/Session');
+const Token = require('~/models/schema/tokenSchema');
-const Token = require('../../models/schema/tokenSchema');
+const { sendEmail } = require('~/server/utils');
-const { registerSchema, errorsToString } = require('../../strategies/validators');
+const Session = require('~/models/Session');
-const { sendEmail } = require('../utils');
+const { logger } = require('~/config');
+const User = require('~/models/User');

 const domains = {
 client: process.env.DOMAIN_CLIENT,
 server: process.env.DOMAIN_SERVER,

@@ -29,7 +31,7 @@ const logoutUser = async (userId, refreshToken) => {
 try {
 await Session.deleteOne({ _id: session._id });
 } catch (deleteErr) {
-console.error(deleteErr);
+logger.error('[logoutUser] Failed to delete session.', deleteErr);
 return { status: 500, message: 'Failed to delete session.' };
 }
 }

@@ -50,7 +52,7 @@ const registerUser = async (user) => {
 const { error } = registerSchema.safeParse(user);
 if (error) {
 const errorMessage = errorsToString(error.errors);
-console.info(
+logger.info(
 'Route: register - Validation Error',
 { name: 'Request params:', value: user },
 { name: 'Validation error:', value: errorMessage },

@@ -65,7 +67,7 @@ const registerUser = async (user) => {
 const existingUser = await User.findOne({ email }).lean();

 if (existingUser) {
-console.info(
+logger.info(
 'Register User - Email in use',
 { name: 'Request params:', value: user },
 { name: 'Existing user:', value: existingUser },

@@ -229,7 +231,7 @@ const setAuthTokens = async (userId, res, sessionId = null) => {

 return token;
 } catch (error) {
-console.log('Error in setting authentication tokens:', error);
+logger.error('[setAuthTokens] Error in setting authentication tokens:', error);
 throw error;
 }
 };
@@ -1,5 +1,6 @@
 const fs = require('fs');
 const path = require('path');
+const { logger } = require('~/config');

 /**
 * Saves a file to a specified output path with a new filename.

@@ -24,7 +25,7 @@ async function saveFile(file, outputPath, outputFilename) {

 return outputFilePath;
 } catch (error) {
-console.error('Error while saving the file:', error);
+logger.error('[saveFile] Error while saving the file:', error);
 throw error;
 }
 }

@@ -1,10 +1,13 @@
-const HttpsProxyAgent = require('https-proxy-agent');
-const axios = require('axios');
 const Keyv = require('keyv');
+const axios = require('axios');
+const HttpsProxyAgent = require('https-proxy-agent');
 const { isEnabled } = require('~/server/utils');
-const { extractBaseURL } = require('~/utils');
 const keyvRedis = require('~/cache/keyvRedis');
+const { extractBaseURL } = require('~/utils');
+const { logger } = require('~/config');

 // const { getAzureCredentials, genAzureChatCompletion } = require('~/utils/');

 const { openAIApiKey, userProvidedOpenAI } = require('./Config/EndpointService').config;

 const modelsCache = isEnabled(process.env.USE_REDIS)

@@ -54,9 +57,9 @@ const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _model
 const res = await axios.get(`${basePath}${opts.azure ? '' : '/models'}`, payload);

 models = res.data.data.map((item) => item.id);
-// console.log(`Fetched ${models.length} models from ${opts.azure ? 'Azure ' : ''}OpenAI API`);
+// logger.debug(`Fetched ${models.length} models from ${opts.azure ? 'Azure ' : ''}OpenAI API`);
 } catch (err) {
-console.log(`Failed to fetch models from ${opts.azure ? 'Azure ' : ''}OpenAI API`);
+logger.error(`Failed to fetch models from ${opts.azure ? 'Azure ' : ''}OpenAI API`, err);
 }
 }
@@ -1,5 +1,6 @@
-const PluginAuth = require('../../models/schema/pluginAuthSchema');
-const { encrypt, decrypt } = require('../utils/');
+const PluginAuth = require('~/models/schema/pluginAuthSchema');
+const { encrypt, decrypt } = require('~/server/utils/');
+const { logger } = require('~/config');
 
 const getUserPluginAuthValue = async (user, authField) => {
   try {
@@ -11,7 +12,7 @@ const getUserPluginAuthValue = async (user, authField) => {
     const decryptedValue = decrypt(pluginAuth.value);
     return decryptedValue;
   } catch (err) {
-    console.log(err);
+    logger.error('[getUserPluginAuthValue]', err);
     return err;
   }
 };
@@ -36,7 +37,7 @@ const getUserPluginAuthValue = async (user, authField) => {
 
 //     return pluginAuth;
 //   } catch (err) {
-//     console.log(err);
+//     logger.error('[getUserPluginAuthValue]', err);
 //     return err;
 //   }
 // };
@@ -62,7 +63,7 @@ const updateUserPluginAuth = async (userId, authField, pluginKey, value) => {
       return newPluginAuth;
     }
   } catch (err) {
-    console.log(err);
+    logger.error('[updateUserPluginAuth]', err);
     return err;
   }
 };
@@ -72,7 +73,7 @@ const deleteUserPluginAuth = async (userId, authField) => {
     const response = await PluginAuth.deleteOne({ userId, authField });
     return response;
   } catch (err) {
-    console.log(err);
+    logger.error('[deleteUserPluginAuth]', err);
     return err;
   }
 };

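Every catch block in this service now logs with a `[functionName]` prefix and passes the raw error as the second argument. A small illustrative snippet of why the error is not interpolated into the message string (the function and error below are hypothetical):

// Illustration only — the function and error are made up.
const { logger } = require('~/config');

function demo() {
  try {
    JSON.parse('{not json'); // throws a SyntaxError
  } catch (err) {
    logger.error('[demo]', err); // err stays an object, so the formatter can keep the stack
    // logger.error(`[demo] ${err}`); // would flatten the Error to a string and drop detail
    return err;
  }
}
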
@@ -1,3 +1,5 @@
+const { logger } = require('~/config');
+
 /**
  * @typedef {import('openai').OpenAI} OpenAI
  * @typedef {import('../AssistantService').RunStep} RunStep
@@ -84,8 +86,12 @@ class RunManager {
       return await this.handlers['final']({ step, runStatus, stepsByStatus: this.stepsByStatus });
     }
 
-    console.log(`Default handler for ${step.id} with status \`${runStatus}\``);
-    console.dir({ step, runStatus, final, isLast }, { depth: null });
+    logger.debug(`[RunManager] Default handler for ${step.id} with status \`${runStatus}\``, {
+      step,
+      runStatus,
+      final,
+      isLast,
+    });
     return step;
   }
 }

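The RunManager change collapses console.log plus console.dir into a single logger.debug call that carries a metadata object. For illustration, the same call shape with made-up values:

// Hypothetical values, shown only to illustrate the (message, metadataObject) shape.
const { logger } = require('~/config');

logger.debug('[RunManager] Default handler for step_abc with status in_progress', {
  step: { id: 'step_abc' },
  runStatus: 'in_progress',
  final: false,
  isLast: true,
});
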
@@ -1,5 +1,6 @@
-const { User, Key } = require('../../models');
-const { encrypt, decrypt } = require('../utils');
+const { User, Key } = require('~/models');
+const { encrypt, decrypt } = require('~/server/utils');
+const { logger } = require('~/config');
 
 const updateUserPluginsService = async (user, pluginKey, action) => {
   try {
@@ -15,7 +16,7 @@ const updateUserPluginsService = async (user, pluginKey, action) => {
       );
     }
   } catch (err) {
-    console.log(err);
+    logger.error('[updateUserPluginsService]', err);
     return err;
   }
 };

@@ -1,7 +1,8 @@
-const { Tiktoken } = require('tiktoken/lite');
 const { load } = require('tiktoken/load');
+const { Tiktoken } = require('tiktoken/lite');
 const registry = require('tiktoken/registry.json');
 const models = require('tiktoken/model_to_encoding.json');
+const logger = require('~/config/winston');
 
 const countTokens = async (text = '', modelName = 'gpt-3.5-turbo') => {
   let encoder = null;
@@ -12,7 +13,7 @@ const countTokens = async (text = '', modelName = 'gpt-3.5-turbo') => {
     encoder.free();
     return tokens.length;
   } catch (e) {
-    console.error(e);
+    logger.error('[countTokens]', e);
     if (encoder) {
       encoder.free();
     }

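A quick usage sketch for the countTokens helper above (the sample text is arbitrary; the model name matches the function's default parameter):

// Usage sketch only — assumes countTokens and logger from the module above are in scope.
countTokens('Hello, world!', 'gpt-3.5-turbo').then((count) => {
  logger.debug(`[countTokens] sample text is ${count} tokens`);
});
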
@@ -1,3 +1,5 @@
+require('dotenv').config();
+
 const crypto = require('crypto');
 const key = Buffer.from(process.env.CREDS_KEY, 'hex');
 const iv = Buffer.from(process.env.CREDS_IV, 'hex');

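Because this module now loads dotenv itself, CREDS_KEY and CREDS_IV must be present as hex strings in the environment. A hedged one-off for generating them, assuming an AES-256-CBC style setup (32-byte key, 16-byte IV) — confirm the expected lengths against the project docs:

// One-off generator script; the byte lengths are an assumption, not taken from this diff.
const crypto = require('crypto');
console.log(`CREDS_KEY=${crypto.randomBytes(32).toString('hex')}`);
console.log(`CREDS_IV=${crypto.randomBytes(16).toString('hex')}`);
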
@@ -1,6 +1,6 @@
 const partialRight = require('lodash/partialRight');
-const { getCitations, citeText } = require('./citations');
 const { sendMessage } = require('./streamResponse');
+const { getCitations, citeText } = require('./citations');
 const cursor = '<span className="result-streaming">█</span>';
 const citationRegex = /\[\^\d+?\^]/g;
 

@@ -138,21 +138,31 @@ function formatAction(action) {
 }
 
 /**
- * Checks if the given string value is truthy by comparing it to the string 'true' (case-insensitive).
+ * Checks if the given value is truthy by being either the boolean `true` or a string
+ * that case-insensitively matches 'true'.
  *
  * @function
- * @param {string|null|undefined} value - The string value to check.
- * @returns {boolean} Returns `true` if the value is a case-insensitive match for the string 'true', otherwise returns `false`.
+ * @param {string|boolean|null|undefined} value - The value to check.
+ * @returns {boolean} Returns `true` if the value is the boolean `true` or a case-insensitive
+ * match for the string 'true', otherwise returns `false`.
  * @example
  *
  * isEnabled("True"); // returns true
 * isEnabled("TRUE"); // returns true
+ * isEnabled(true); // returns true
 * isEnabled("false"); // returns false
+ * isEnabled(false); // returns false
 * isEnabled(null); // returns false
 * isEnabled(); // returns false
 */
 function isEnabled(value) {
-  return value?.toLowerCase()?.trim() === 'true';
+  if (typeof value === 'boolean') {
+    return value;
+  }
+  if (typeof value === 'string') {
+    return value.toLowerCase().trim() === 'true';
+  }
+  return false;
 }
 
 module.exports = {

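A typical call site for the reworked isEnabled, which now accepts booleans as well as env strings (USE_REDIS is the same flag seen earlier in this diff):

const { isEnabled } = require('~/server/utils'); // same import path used above

const useRedis = isEnabled(process.env.USE_REDIS); // 'true' (string) or true (boolean) both work
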
api/server/utils/handleText.spec.js (new file, 51 lines)
@@ -0,0 +1,51 @@
+const { isEnabled } = require('./handleText');
+
+describe('isEnabled', () => {
+  test('should return true when input is "true"', () => {
+    expect(isEnabled('true')).toBe(true);
+  });
+
+  test('should return true when input is "TRUE"', () => {
+    expect(isEnabled('TRUE')).toBe(true);
+  });
+
+  test('should return true when input is true', () => {
+    expect(isEnabled(true)).toBe(true);
+  });
+
+  test('should return false when input is "false"', () => {
+    expect(isEnabled('false')).toBe(false);
+  });
+
+  test('should return false when input is false', () => {
+    expect(isEnabled(false)).toBe(false);
+  });
+
+  test('should return false when input is null', () => {
+    expect(isEnabled(null)).toBe(false);
+  });
+
+  test('should return false when input is undefined', () => {
+    expect(isEnabled()).toBe(false);
+  });
+
+  test('should return false when input is an empty string', () => {
+    expect(isEnabled('')).toBe(false);
+  });
+
+  test('should return false when input is a whitespace string', () => {
+    expect(isEnabled(' ')).toBe(false);
+  });
+
+  test('should return false when input is a number', () => {
+    expect(isEnabled(123)).toBe(false);
+  });
+
+  test('should return false when input is an object', () => {
+    expect(isEnabled({})).toBe(false);
+  });
+
+  test('should return false when input is an array', () => {
+    expect(isEnabled([])).toBe(false);
+  });
+});

@@ -38,8 +38,7 @@ function math(str, fallbackValue) {
     if (fallback) {
       return fallbackValue;
     }
-    console.error('str', str);
-    throw new Error(`str did not evaluate to a number but to a ${typeof value}`);
+    throw new Error(`[math] str did not evaluate to a number but to a ${typeof value}`);
   }
 
   return value;

@@ -1,7 +1,8 @@
-const nodemailer = require('nodemailer');
-const handlebars = require('handlebars');
 const fs = require('fs');
 const path = require('path');
+const nodemailer = require('nodemailer');
+const handlebars = require('handlebars');
+const logger = require('~/config/winston');
 
 const sendEmail = async (email, subject, payload, template) => {
   try {
@@ -58,15 +59,15 @@ const sendEmail = async (email, subject, payload, template) => {
     // Send email
     transporter.sendMail(options(), (error, info) => {
       if (error) {
-        console.log(error);
+        logger.error('[sendEmail]', error);
         return error;
       } else {
-        console.log(info);
+        logger.debug('[sendEmail]', info);
         return info;
       }
     });
   } catch (error) {
-    console.log(error);
+    logger.error('[sendEmail]', error);
     return error;
   }
 };

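For reference, a hypothetical call to the sendEmail helper above — the recipient, subject, payload keys, and template filename are all invented for illustration:

// Placeholder values only, not taken from the repo; assumes sendEmail and logger
// from the module above are in scope.
sendEmail('user@example.com', 'Password Reset', { name: 'Ada', link: 'https://example.com/reset' }, 'passwordReset.handlebars')
  .then(() => logger.debug('[sendEmail] message handed off to the transporter'));
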
Some files were not shown because too many files have changed in this diff.