Feat: PaLM 2 (#262)
* feat(api): add googleapis package to package.json
feat(api): add reqDemo.js to make a request to the Google Cloud AI Platform API and get a response from a chatbot model
* feat: add PaLM2 support
* feat(conversationPreset.js): add support for topP and topK for google endpoint
feat(askGoogle.js): add support for topP and topK for google endpoint
feat(ask/index.js): add google endpoint
feat(endpoints.js): add google endpoint
feat(MessageHeader.jsx): add support for modelLabel for google endpoint
feat(PresetItem.jsx): add support for modelLabel for google endpoint
feat(HoverButtons.jsx): add support for google endpoint
feat(createPayload.ts): add google endpoint
feat(types.ts): add google endpoint
feat(store/endpoints.js): add google endpoint
feat(cleanupPreset.js): add support for topP and topK for google endpoint
feat(getDefaultConversation.js): add support for topP and topK for google endpoint
feat(handleSubmit.js): add support for topP and topK for google endpoint
* fix: messages payload
* refactor(GoogleClient.js): set maxContextTokens based on isTextModel value
feat(GoogleClient.js): add delay option to TextStream constructor
feat(getIcon.jsx): add support for google endpoint and PaLM2 model label
* feat: palm frontend changes
* feat(askGoogle.js): set default example to empty input and output
feat(Examples.jsx): add ability to add and remove examples
refactor(Settings.jsx): remove examples from props and setOption function
style(GoogleOptions): remove unnecessary whitespace after Settings2 import
feat(GoogleOptions): add addExample and removeExample functions to manage examples
fix(cleanupPreset): set default example to [{ input: '', output: ''}]
fix(getDefaultConversation): set default example to [{ input: '', output: ''}]
fix(handleSubmit): set default example to [{ input: '', output: ''}]
* style(client): adjust height of settings and examples components to 350px
fix(client): fix path to palm.png image in getIcon.jsx file
* style(EndpointOptionsPopover.jsx, Examples.jsx, Settings.jsx): improve button styles and update input placeholders
* feat(palm): finalize examples on the frontend
* feat(GoogleClient.js): filter out empty examples in options
feat(GoogleClient.js): add support for promptPrefix in buildPayload method
feat(GoogleClient.js): add support for examples in buildPayload method
feat(conversationPreset.js): add maxOutputTokens field to conversation preset schema
feat(presetSchema.js): add examples field to preset schema
feat(askGoogle.js): add support for examples and promptPrefix in endpointOption
feat(EditPresetDialog.jsx): add Examples component for Google endpoint
feat(EditPresetDialog.jsx): add button to show/hide Examples component
feat(EditPresetDialog.jsx): add functionality to add, remove, and edit examples in Examples component
feat(EndpointOptionsDialog.jsx): change endpoint name to PaLM for Google endpoint
feat(Settings.jsx): add maxHeight prop to limit height of Settings component in EditPresetDialog and EndpointOptionsDialog
fix(Settings.jsx): add examples prop to ChatGPTBrowser component
fix(EndpointItem.jsx): add alternate name for google endpoint
fix(MessageHeader.jsx): change title for google endpoint to PaLM
feat(endpoints.js): add google endpoint to endpointsConfig
fix(cleanupPreset.js): add missing comma in examples array
* chore: change endpoint order
* feat(PaLM 2): complete for testing
* fix(PaLM): handle blocked messages
This commit is contained in:
parent 95c97561ae
commit 3414690e42
40 changed files with 2369 additions and 79 deletions
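Taken together, the diffs below add a 'google' ask endpoint backed by PaLM 2 on Vertex AI. As a rough client-side sketch of calling it (assumptions: the ask router is mounted at /api/ask, the caller holds a valid JWT since the route uses requireJwtAuth, and the field names mirror askGoogle.js further down):

// Hypothetical request sketch; run inside an async function.
const body = {
  endpoint: 'google',
  text: 'What is PaLM 2?',
  conversationId: null,
  parentMessageId: null,
  model: 'chat-bison', // or 'text-bison'
  modelLabel: null,
  promptPrefix: null,
  temperature: 0.2,
  topP: 0.95,
  topK: 40,
  maxOutputTokens: 1024,
  examples: [{ input: { content: '' }, output: { content: '' } }]
};

const res = await fetch('/api/ask/google', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${jwt}` },
  body: JSON.stringify(body)
});
// The reply streams back as text/event-stream rather than a single JSON document.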
@@ -89,6 +89,33 @@ CHATGPT_MODELS=text-davinci-002-render-sha,text-davinci-002-render-paid,gpt-4

# By default, the server will use the node-chatgpt-api recommended proxy (a third party server).
# CHATGPT_REVERSE_PROXY=

##########################
# PaLM (Google) Endpoint:
##########################

# PaLM 2 Client (via Google Cloud Vertex AI API)
# Steps:
# 1. Enable the Vertex AI API on Google Cloud:
#    https://console.cloud.google.com/vertex-ai
# 2. Create a Service Account:
#    https://console.cloud.google.com/projectselector/iam-admin/serviceaccounts/create?walkthrough_id=iam--create-service-account#step_index=1
#    Make sure to click 'Create and Continue' to give it at least the 'Vertex AI User' role.
# 3. Create a JSON key, rename it 'auth.json', and save it in /api/data/.
# Alternatively, uncomment PALM_KEY below and set it to "user_provided" to let users provide
# a Service Account key JSON from the UI; they follow the same steps except for renaming the file.
# Leave blank or omit to disable this endpoint.

# PALM_KEY="user_provided"

# In case you need a reverse proxy for this endpoint:
# GOOGLE_REVERSE_PROXY=

##########################
# Proxy: to be used by all endpoints
##########################
PROXY=

##########################
# Search:
##########################
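For reference, the 'auth.json' mentioned above is a standard Google Cloud service-account key export. A sketch of its shape, with placeholder values only; GoogleClient.js below reads just client_email, project_id, and private_key from it:

// Shape of /api/data/auth.json (service-account key exported from Google Cloud); all values are placeholders.
{
  "type": "service_account",
  "project_id": "my-gcp-project",
  "private_key_id": "abc123",
  "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
  "client_email": "librechat-palm@my-gcp-project.iam.gserviceaccount.com",
  "client_id": "1234567890"
}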
api/app/google/GoogleClient.js (new file, 390 additions)
@@ -0,0 +1,390 @@
const crypto = require('crypto');
const TextStream = require('../stream');
const { google } = require('googleapis');
const { Agent, ProxyAgent } = require('undici');
const { getMessages, saveMessage, saveConvo } = require('../../models');
const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('@dqbd/tiktoken');

const tokenizersCache = {};

class GoogleAgent {
  constructor(credentials, options = {}) {
    this.client_email = credentials.client_email;
    this.project_id = credentials.project_id;
    this.private_key = credentials.private_key;
    this.setOptions(options);
    this.currentDateString = new Date().toLocaleDateString('en-us', {
      year: 'numeric',
      month: 'long',
      day: 'numeric'
    });
  }

  constructUrl() {
    return `https://us-central1-aiplatform.googleapis.com/v1/projects/${this.project_id}/locations/us-central1/publishers/google/models/${this.modelOptions.model}:predict`;
  }

  setOptions(options) {
    if (this.options && !this.options.replaceOptions) {
      // nested options aren't spread properly, so we need to do this manually
      this.options.modelOptions = {
        ...this.options.modelOptions,
        ...options.modelOptions
      };
      delete options.modelOptions;
      // now we can merge options
      this.options = {
        ...this.options,
        ...options
      };
    } else {
      this.options = options;
    }

    this.options.examples = this.options.examples.filter(
      obj => obj.input.content !== '' && obj.output.content !== ''
    );

    const modelOptions = this.options.modelOptions || {};
    this.modelOptions = {
      ...modelOptions,
      // set some good defaults (check for undefined in some cases because they may be 0)
      model: modelOptions.model || 'chat-bison',
      temperature: typeof modelOptions.temperature === 'undefined' ? 0.2 : modelOptions.temperature, // 0 - 1, 0.2 is recommended
      topP: typeof modelOptions.topP === 'undefined' ? 0.95 : modelOptions.topP, // 0 - 1, default: 0.95
      topK: typeof modelOptions.topK === 'undefined' ? 40 : modelOptions.topK // 1-40, default: 40
      // stop: modelOptions.stop // no stop method for now
    };

    this.isChatModel = this.modelOptions.model.startsWith('chat-');
    const { isChatModel } = this;
    this.isTextModel = this.modelOptions.model.startsWith('text-');
    const { isTextModel } = this;

    this.maxContextTokens = this.options.maxContextTokens || (isTextModel ? 8000 : 4096);
    // The max prompt tokens is determined by the max context tokens minus the max response tokens.
    // Earlier messages will be dropped until the prompt is within the limit.
    this.maxResponseTokens = this.modelOptions.maxOutputTokens || 1024;
    this.maxPromptTokens = this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens;

    if (this.maxPromptTokens + this.maxResponseTokens > this.maxContextTokens) {
      throw new Error(
        `maxPromptTokens + maxOutputTokens (${this.maxPromptTokens} + ${this.maxResponseTokens} = ${
          this.maxPromptTokens + this.maxResponseTokens
        }) must be less than or equal to maxContextTokens (${this.maxContextTokens})`
      );
    }

    this.userLabel = this.options.userLabel || 'User';
    this.modelLabel = this.options.modelLabel || 'Assistant';

    if (isChatModel) {
      // Use these faux tokens to help the AI understand the context since we are building the chat log ourselves.
      // Trying to use "<|im_start|>" causes the AI to still generate "<" or "<|" at the end sometimes for some reason,
      // without tripping the stop sequences, so I'm using "||>" instead.
      this.startToken = '||>';
      this.endToken = '';
      this.gptEncoder = this.constructor.getTokenizer('cl100k_base');
    } else if (isTextModel) {
      this.startToken = '<|im_start|>';
      this.endToken = '<|im_end|>';
      this.gptEncoder = this.constructor.getTokenizer('text-davinci-003', true, {
        '<|im_start|>': 100264,
        '<|im_end|>': 100265
      });
    } else {
      // Previously I was trying to use "<|endoftext|>" but there seems to be some bug with OpenAI's token counting
      // system that causes only the first "<|endoftext|>" to be counted as 1 token, and the rest are not treated
      // as a single token. So we're using this instead.
      this.startToken = '||>';
      this.endToken = '';
      try {
        this.gptEncoder = this.constructor.getTokenizer(this.modelOptions.model, true);
      } catch {
        this.gptEncoder = this.constructor.getTokenizer('text-davinci-003', true);
      }
    }

    if (!this.modelOptions.stop) {
      const stopTokens = [this.startToken];
      if (this.endToken && this.endToken !== this.startToken) {
        stopTokens.push(this.endToken);
      }
      stopTokens.push(`\n${this.userLabel}:`);
      stopTokens.push('<|diff_marker|>');
      // I chose not to do one for `modelLabel` because I've never seen it happen
      this.modelOptions.stop = stopTokens;
    }

    if (this.options.reverseProxyUrl) {
      this.completionsUrl = this.options.reverseProxyUrl;
    } else {
      this.completionsUrl = this.constructUrl();
    }

    return this;
  }

  static getTokenizer(encoding, isModelName = false, extendSpecialTokens = {}) {
    if (tokenizersCache[encoding]) {
      return tokenizersCache[encoding];
    }
    let tokenizer;
    if (isModelName) {
      tokenizer = encodingForModel(encoding, extendSpecialTokens);
    } else {
      tokenizer = getEncoding(encoding, extendSpecialTokens);
    }
    tokenizersCache[encoding] = tokenizer;
    return tokenizer;
  }

  async getClient() {
    const scopes = ['https://www.googleapis.com/auth/cloud-platform'];
    const jwtClient = new google.auth.JWT(this.client_email, null, this.private_key, scopes);

    jwtClient.authorize((err) => {
      if (err) {
        console.log(err);
        throw err;
      }
    });

    return jwtClient;
  }

  buildPayload(input, { messages = [] }) {
    let payload = {
      instances: [
        {
          messages: [...messages, { author: this.userLabel, content: input }]
        }
      ],
      parameters: this.options.modelOptions
    };

    if (this.options.promptPrefix) {
      payload.instances[0].context = this.options.promptPrefix;
    }

    if (this.options.examples.length > 0) {
      payload.instances[0].examples = this.options.examples;
    }

    if (this.isTextModel) {
      payload.instances = [
        {
          prompt: input
        }
      ];
    }

    if (this.options.debug) {
      console.debug('buildPayload');
      console.dir(payload, { depth: null });
    }

    return payload;
  }

  async getCompletion(input, messages = [], abortController = null) {
    if (!abortController) {
      abortController = new AbortController();
    }
    const { debug } = this.options;
    const url = this.completionsUrl;
    if (debug) {
      console.debug();
      console.debug(url);
      console.debug(this.modelOptions);
      console.debug();
    }
    const opts = {
      method: 'POST',
      agent: new Agent({
        bodyTimeout: 0,
        headersTimeout: 0
      }),
      signal: abortController.signal
    };

    if (this.options.proxy) {
      opts.agent = new ProxyAgent(this.options.proxy);
    }

    const client = await this.getClient();
    const payload = this.buildPayload(input, { messages });
    const res = await client.request({ url, method: 'POST', data: payload });
    console.dir(res.data, { depth: null });
    return res.data;
  }

  async loadHistory(conversationId, parentMessageId = null) {
    if (this.options.debug) {
      console.debug('Loading history for conversation', conversationId, parentMessageId);
    }

    if (!parentMessageId) {
      return [];
    }

    const messages = (await getMessages({ conversationId })) || [];

    if (messages.length === 0) {
      this.currentMessages = [];
      return [];
    }

    const orderedMessages = this.constructor.getMessagesForConversation(messages, parentMessageId);
    return orderedMessages.map((message) => {
      return {
        author: message.isCreatedByUser ? this.userLabel : this.modelLabel,
        content: message.content
      };
    });
  }

  async saveMessageToDatabase(message, user = null) {
    await saveMessage({ ...message, unfinished: false });
    await saveConvo(user, {
      conversationId: message.conversationId,
      endpoint: 'google',
      ...this.modelOptions
    });
  }

  async sendMessage(message, opts = {}) {
    if (opts && typeof opts === 'object') {
      this.setOptions(opts);
    }
    console.log('sendMessage', message, opts);

    const user = opts.user || null;
    const conversationId = opts.conversationId || crypto.randomUUID();
    const parentMessageId = opts.parentMessageId || '00000000-0000-0000-0000-000000000000';
    const userMessageId = crypto.randomUUID();
    const responseMessageId = crypto.randomUUID();
    const messages = await this.loadHistory(conversationId, this.options?.parentMessageId);

    const userMessage = {
      messageId: userMessageId,
      parentMessageId,
      conversationId,
      sender: 'User',
      text: message,
      isCreatedByUser: true
    };

    if (typeof opts?.getIds === 'function') {
      opts.getIds({
        userMessage,
        conversationId,
        responseMessageId
      });
    }

    console.log('userMessage', userMessage);

    await this.saveMessageToDatabase(userMessage, user);
    let reply = '';
    let blocked = false;
    try {
      const result = await this.getCompletion(message, messages, opts.abortController);
      blocked = result?.predictions?.[0]?.safetyAttributes?.blocked;
      reply = result?.predictions?.[0]?.candidates?.[0]?.content || result?.predictions?.[0]?.content || '';
      if (blocked === true) {
        reply = `Google blocked a proper response to your message:\n${JSON.stringify(
          result.predictions[0].safetyAttributes
        )}${reply.length > 0 ? `\nAI Response:\n${reply}` : ''}`;
      }
      if (this.options.debug) {
        console.debug('result');
        console.debug(result);
      }
    } catch (err) {
      console.error(err);
    }

    if (this.options.debug) {
      console.debug('options');
      console.debug(this.options);
    }

    if (!blocked) {
      const textStream = new TextStream(reply, { delay: 0.5 });
      await textStream.processTextStream(opts.onProgress);
    }

    const responseMessage = {
      messageId: responseMessageId,
      conversationId,
      parentMessageId: userMessage.messageId,
      sender: 'PaLM2',
      text: reply,
      error: blocked,
      isCreatedByUser: false
    };

    await this.saveMessageToDatabase(responseMessage, user);
    return responseMessage;
  }

  getTokenCount(text) {
    return this.gptEncoder.encode(text, 'all').length;
  }

  /**
   * Algorithm adapted from "6. Counting tokens for chat API calls" of
   * https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
   *
   * An additional 2 tokens need to be added for metadata after all messages have been counted.
   *
   * @param {*} message
   */
  getTokenCountForMessage(message) {
    // Map each property of the message to the number of tokens it contains
    const propertyTokenCounts = Object.entries(message).map(([key, value]) => {
      // Count the number of tokens in the property value
      const numTokens = this.getTokenCount(value);

      // Subtract 1 token if the property key is 'name'
      const adjustment = key === 'name' ? 1 : 0;
      return numTokens - adjustment;
    });

    // Sum the number of tokens in all properties and add 4 for metadata
    return propertyTokenCounts.reduce((a, b) => a + b, 4);
  }

  /**
   * Iterate through messages, building an array based on the parentMessageId.
   * Each message has an id and a parentMessageId. The parentMessageId is the id of the message that this message is a reply to.
   * @param messages
   * @param parentMessageId
   * @returns {*[]} An array containing the messages in the order they should be displayed, starting with the root message.
   */
  static getMessagesForConversation(messages, parentMessageId) {
    const orderedMessages = [];
    let currentMessageId = parentMessageId;
    while (currentMessageId) {
      // eslint-disable-next-line no-loop-func
      const message = messages.find(m => m.messageId === currentMessageId);
      if (!message) {
        break;
      }
      orderedMessages.unshift(message);
      currentMessageId = message.parentMessageId;
    }

    if (orderedMessages.length === 0) {
      return [];
    }

    return orderedMessages.map(msg => ({
      isCreatedByUser: msg.isCreatedByUser,
      content: msg.text
    }));
  }
}

module.exports = GoogleAgent;
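A minimal usage sketch of this client outside the Express route. The paths, the key file, and the standalone invocation are assumptions, and saveMessageToDatabase writes through the app's Mongoose models, so this only works against a connected database; the real wiring, with progress callbacks and persistence, is in askGoogle.js further down.

// Hypothetical standalone driver for GoogleClient; assumes api/data/auth.json exists
// and that the Mongoose models behind saveMessage/saveConvo are already connected.
const GoogleClient = require('./GoogleClient');
const key = require('../../data/auth.json');

const client = new GoogleClient(key, {
  modelOptions: { model: 'chat-bison', temperature: 0.2, topP: 0.95, topK: 40 },
  examples: [{ input: { content: '' }, output: { content: '' } }]
});

(async () => {
  const response = await client.sendMessage('Hello, PaLM 2!', {
    onProgress: (partialText) => process.stdout.write(partialText)
  });
  console.log('\nsender:', response.sender, '| text length:', response.text.length);
})();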
api/app/stream.js (new file, 62 additions)
@@ -0,0 +1,62 @@
const { Readable } = require('stream');

class TextStream extends Readable {
  constructor(text, options = {}) {
    super(options);
    this.text = text;
    this.currentIndex = 0;
    this.delay = options.delay || 20; // Time in milliseconds
  }

  _read() {
    const minChunkSize = 2;
    const maxChunkSize = 4;
    const { delay } = this;

    if (this.currentIndex < this.text.length) {
      setTimeout(() => {
        const remainingChars = this.text.length - this.currentIndex;
        const chunkSize = Math.min(
          this.randomInt(minChunkSize, maxChunkSize + 1),
          remainingChars
        );

        const chunk = this.text.slice(this.currentIndex, this.currentIndex + chunkSize);
        this.push(chunk);
        this.currentIndex += chunkSize;
      }, delay);
    } else {
      this.push(null); // signal end of data
    }
  }

  randomInt(min, max) {
    return Math.floor(Math.random() * (max - min)) + min;
  }

  async processTextStream(onProgressCallback) {
    const streamPromise = new Promise((resolve, reject) => {
      this.on('data', (chunk) => {
        onProgressCallback(chunk.toString());
      });

      this.on('end', () => {
        console.log('Stream ended');
        resolve();
      });

      this.on('error', (err) => {
        reject(err);
      });
    });

    try {
      await streamPromise;
    } catch (err) {
      console.error('Error processing text stream:', err);
      // Handle the error appropriately, e.g., return an error message or throw an error
    }
  }
}

module.exports = TextStream;
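A quick sketch of using the stream on its own; the text and delay here are arbitrary:

// Illustrative only: TextStream pushes 2-4 character chunks with a delay between reads.
const TextStream = require('./stream');

const stream = new TextStream('Streaming a canned reply to the client...', { delay: 10 });
stream
  .processTextStream((chunk) => process.stdout.write(chunk))
  .then(() => console.log('\ndone'));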
@@ -17,6 +17,12 @@ module.exports = {
     default: null,
     required: false
   },
+  // for google only
+  modelLabel: {
+    type: String,
+    default: null,
+    required: false
+  },
   promptPrefix: {
     type: String,
     default: null,

@@ -32,6 +38,22 @@ module.exports = {
     default: 1,
     required: false
   },
+  // for google only
+  topP: {
+    type: Number,
+    default: 0.95,
+    required: false
+  },
+  topK: {
+    type: Number,
+    default: 40,
+    required: false
+  },
+  maxOutputTokens: {
+    type: Number,
+    default: 1024,
+    required: false
+  },
   presence_penalty: {
     type: Number,
     default: 0,
@@ -20,6 +20,8 @@ const convoSchema = mongoose.Schema(
       default: null
     },
     messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }],
+    // google only
+    examples: [{ type: mongoose.Schema.Types.Mixed }],
     ...conversationPreset,
     // for bingAI only
     jailbreakConversationId: {
@@ -17,6 +17,8 @@ const presetSchema = mongoose.Schema(
       type: String,
       default: null
     },
+    // google only
+    examples: [{ type: mongoose.Schema.Types.Mixed }],
     ...conversationPreset
   },
   { timestamps: true }
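To make the schema additions concrete, a saved Google preset might look roughly like the following. The endpoint and model fields are assumed to come from the shared conversationPreset fields, which are not shown in this diff; the rest are the fields added in this commit, with illustrative values rather than enforced defaults.

// Illustrative preset document for the google endpoint.
{
  endpoint: 'google',   // assumed base field from conversationPreset
  model: 'chat-bison',  // assumed base field from conversationPreset
  modelLabel: 'PaLM2',
  promptPrefix: 'You are a concise assistant.',
  temperature: 0.2,
  topP: 0.95,
  topK: 40,
  maxOutputTokens: 1024,
  examples: [
    { input: { content: 'Hi' }, output: { content: 'Hello! How can I help?' } }
  ]
}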
api/package-lock.json (generated, 357 additions)
@@ -22,6 +22,7 @@
       "dotenv": "^16.0.3",
       "eslint": "^8.36.0",
       "express": "^4.18.2",
+      "googleapis": "^118.0.0",
       "handlebars": "^4.7.7",
       "html": "^1.0.0",
       "joi": "^14.3.1",
(The remaining hunks of this generated lockfile register googleapis 118.0.0 and its transitive dependencies in both the "packages" and legacy "dependencies" sections: googleapis-common 6.0.4, google-auth-library 8.8.0, gaxios 5.1.0, gcp-metadata 5.2.0, google-p12-pem 4.0.1, gtoken 6.1.2, jwa 2.0.0, jws 4.0.0, json-bigint 1.0.0, bignumber.js 9.1.1, node-forge 1.3.1, fast-text-encoding 1.0.6, arrify 2.0.1, extend 3.0.2, url-template 2.0.8, and uuid 9.0.0.)
@@ -32,6 +32,7 @@
     "dotenv": "^16.0.3",
     "eslint": "^8.36.0",
     "express": "^4.18.2",
+    "googleapis": "^118.0.0",
     "handlebars": "^4.7.7",
     "html": "^1.0.0",
     "joi": "^14.3.1",
api/server/routes/ask/askGoogle.js (new file, 156 additions)
@@ -0,0 +1,156 @@
const express = require('express');
const router = express.Router();
const { titleConvo } = require('../../../app/');
const GoogleClient = require('../../../app/google/GoogleClient');
const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models');
const { handleError, sendMessage, createOnProgress } = require('./handlers');
const requireJwtAuth = require('../../../middleware/requireJwtAuth');

router.post('/', requireJwtAuth, async (req, res) => {
  const { endpoint, text, parentMessageId, conversationId } = req.body;
  if (text.length === 0) return handleError(res, { text: 'Prompt empty or too short' });
  if (endpoint !== 'google') return handleError(res, { text: 'Illegal request' });

  // build endpoint option
  const endpointOption = {
    examples: req.body?.examples ?? [{ input: { content: '' }, output: { content: '' } }],
    promptPrefix: req.body?.promptPrefix ?? null,
    token: req.body?.token ?? null,
    modelOptions: {
      model: req.body?.model ?? 'chat-bison',
      modelLabel: req.body?.modelLabel ?? null,
      temperature: req.body?.temperature ?? 0.2,
      maxOutputTokens: req.body?.maxOutputTokens ?? 1024,
      topP: req.body?.topP ?? 0.95,
      topK: req.body?.topK ?? 40
    }
  };

  const availableModels = ['chat-bison', 'text-bison'];
  if (availableModels.find(model => model === endpointOption.modelOptions.model) === undefined) {
    return handleError(res, { text: `Illegal request: model` });
  }

  // eslint-disable-next-line no-use-before-define
  return await ask({
    text,
    endpointOption,
    conversationId,
    parentMessageId,
    req,
    res
  });
});

const ask = async ({ text, endpointOption, parentMessageId = null, conversationId, req, res }) => {
  res.writeHead(200, {
    Connection: 'keep-alive',
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache, no-transform',
    'Access-Control-Allow-Origin': '*',
    'X-Accel-Buffering': 'no'
  });
  let userMessage;
  let userMessageId;
  let responseMessageId;
  let lastSavedTimestamp = 0;

  try {
    const getIds = (data) => {
      userMessage = data.userMessage;
      userMessageId = userMessage.messageId;
      responseMessageId = data.responseMessageId;
      if (!conversationId) {
        conversationId = data.conversationId;
      }
    };

    const { onProgress: progressCallback } = createOnProgress({
      onProgress: ({ text: partialText }) => {
        const currentTimestamp = Date.now();
        if (currentTimestamp - lastSavedTimestamp > 500) {
          lastSavedTimestamp = currentTimestamp;
          saveMessage({
            messageId: responseMessageId,
            sender: 'PaLM2',
            conversationId,
            parentMessageId: userMessageId,
            text: partialText,
            unfinished: true,
            cancelled: false,
            error: false
          });
        }
      }
    });

    const abortController = new AbortController();

    let key;
    if (endpointOption.token) {
      key = JSON.parse(endpointOption.token);
      delete endpointOption.token;
      console.log('Using service account key provided by User for PaLM models');
    }

    try {
      if (!key) {
        key = require('../../data/auth.json');
      }
    } catch (e) {
      console.log("No 'auth.json' file (service account key) found in /api/data/ for PaLM models");
    }

    const clientOptions = {
      // debug: true, // for testing
      reverseProxyUrl: process.env.GOOGLE_REVERSE_PROXY || null,
      proxy: process.env.PROXY || null,
      ...endpointOption
    };

    const client = new GoogleClient(key, clientOptions);

    let response = await client.sendMessage(text, {
      getIds,
      user: req.user.id,
      parentMessageId,
      conversationId,
      onProgress: progressCallback.call(null, { res, text, parentMessageId: userMessageId }),
      abortController
    });

    await saveMessage(response);
    sendMessage(res, {
      title: await getConvoTitle(req.user.id, conversationId),
      final: true,
      conversation: await getConvo(req.user.id, conversationId),
      requestMessage: userMessage,
      responseMessage: response
    });
    res.end();

    if (parentMessageId == '00000000-0000-0000-0000-000000000000') {
      const title = await titleConvo({ text, response });
      await saveConvo(req.user.id, {
        conversationId: conversationId,
        title
      });
    }
  } catch (error) {
    console.error(error);
    const errorMessage = {
      messageId: responseMessageId,
      sender: 'PaLM2',
      conversationId,
      parentMessageId,
      unfinished: false,
      cancelled: false,
      error: true,
      text: error.message
    };
    await saveMessage(errorMessage);
    handleError(res, errorMessage);
  }
};

module.exports = router;
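Because this route streams its reply, the caller reads the response body incrementally instead of awaiting a single JSON object. A rough sketch of draining the stream from the request shown near the top of this page; the exact event payload format comes from ./handlers, which is not part of this diff, so this just prints raw SSE chunks:

// Sketch: `res` is the fetch response from the earlier request example (Node 18+ or a browser).
const reader = res.body.getReader();
const decoder = new TextDecoder();
let finished = false;
while (!finished) {
  const { value, done } = await reader.read();
  finished = done;
  if (value) process.stdout.write(decoder.decode(value));
}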
@@ -2,11 +2,13 @@ const express = require('express');
 const router = express.Router();
 // const askAzureOpenAI = require('./askAzureOpenAI';)
 const askOpenAI = require('./askOpenAI');
+const askGoogle = require('./askGoogle');
 const askBingAI = require('./askBingAI');
 const askChatGPTBrowser = require('./askChatGPTBrowser');

 // router.use('/azureOpenAI', askAzureOpenAI);
 router.use('/openAI', askOpenAI);
+router.use('/google', askGoogle);
 router.use('/bingAI', askBingAI);
 router.use('/chatGPTBrowser', askChatGPTBrowser);
@@ -15,9 +15,33 @@ const getChatGPTBrowserModels = () => {
   return models;
 };

-router.get('/', function (req, res) {
+let i = 0;
+router.get('/', async function (req, res) {
+  let key, palmUser;
+  try {
+    key = require('../../data/auth.json');
+  } catch (e) {
+    if (i === 0) {
+      console.log("No 'auth.json' file (service account key) found in /api/data/ for PaLM models");
+      i++;
+    }
+  }
+
+  if (process.env.PALM_KEY === 'user_provided') {
+    palmUser = true;
+    if (i <= 1) {
+      console.log('User will provide key for PaLM models');
+      i++;
+    }
+  }
+
+  const google =
+    key || palmUser ? { userProvide: palmUser, availableModels: ['chat-bison', 'text-bison'] } : false;
   const azureOpenAI = !!process.env.AZURE_OPENAI_KEY;
-  const openAI = process.env.OPENAI_KEY || process.env.AZURE_OPENAI_API_KEY ? { availableModels: getOpenAIModels() } : false;
+  const openAI =
+    process.env.OPENAI_KEY || process.env.AZURE_OPENAI_API_KEY
+      ? { availableModels: getOpenAIModels() }
+      : false;
   const bingAI = process.env.BINGAI_TOKEN
     ? { userProvide: process.env.BINGAI_TOKEN == 'user_provided' }
     : false;

@@ -28,7 +52,7 @@ router.get('/', function (req, res) {
     }
     : false;

-  res.send(JSON.stringify({ azureOpenAI, openAI, bingAI, chatGPTBrowser }));
+  res.send(JSON.stringify({ azureOpenAI, openAI, google, bingAI, chatGPTBrowser }));
 });

 module.exports = { router, getOpenAIModels, getChatGPTBrowserModels };
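With that change, the endpoints config route also advertises the google endpoint. Assuming the router is mounted at /api/endpoints and PALM_KEY="user_provided" is set, the response looks roughly like the following; the values for the other endpoints depend on the rest of the server config and are placeholders here:

// Illustrative GET /api/endpoints response with PALM_KEY="user_provided".
{
  "azureOpenAI": false,
  "openAI": { "availableModels": ["gpt-3.5-turbo", "gpt-4"] },
  "google": { "userProvide": true, "availableModels": ["chat-bison", "text-bison"] },
  "bingAI": false,
  "chatGPTBrowser": false
}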