feat: new endpoint-style submit

Wentao Lyu 2023-03-31 03:22:57 +08:00
parent 089ca5f120
commit adcc021c9e
22 changed files with 566 additions and 478 deletions


@@ -1,30 +1,43 @@
 require('dotenv').config();
 const { KeyvFile } = require('keyv-file');
-const set = new Set(['gpt-4', 'text-davinci-003', 'gpt-3.5-turbo', 'gpt-3.5-turbo-0301']);
+// const set = new Set(['gpt-4', 'text-davinci-003', 'gpt-3.5-turbo', 'gpt-3.5-turbo-0301']);
-const clientOptions = {
-  modelOptions: {
-    model: 'gpt-3.5-turbo'
-  },
-  proxy: process.env.PROXY || null,
-  debug: false
-};
-if (set.has(process.env.DEFAULT_API_GPT)) {
-  clientOptions.modelOptions.model = process.env.DEFAULT_API_GPT;
-}
-const askClient = async ({ text, onProgress, convo, abortController }) => {
+const askClient = async ({
+  text,
+  parentMessageId,
+  conversationId,
+  model,
+  chatGptLabel,
+  promptPrefix,
+  temperature,
+  top_p,
+  presence_penalty,
+  onProgress,
+  abortController
+}) => {
   const ChatGPTClient = (await import('@waylaidwanderer/chatgpt-api')).default;
   const store = {
     store: new KeyvFile({ filename: './data/cache.json' })
   };
+  const clientOptions = {
+    modelOptions: {
+      model: model,
+      temperature,
+      top_p,
+      presence_penalty
+    },
+    chatGptLabel,
+    promptPrefix,
+    proxy: process.env.PROXY || null,
+    debug: false
+  };
   const client = new ChatGPTClient(process.env.OPENAI_KEY, clientOptions, store);
   let options = { onProgress, abortController };
-  if (!!convo.parentMessageId && !!convo.conversationId) {
-    options = { ...options, ...convo };
+  if (!!parentMessageId && !!conversationId) {
+    options = { ...options, parentMessageId, conversationId };
   }
   const res = await client.sendMessage(text, options);
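
For context, a minimal sketch of how a caller might use the new flattened signature. This assumes askClient is exported from this module; the handler name, require path, and request-body shape below are hypothetical and not taken from this diff — the real endpoint wiring lives in the other files changed by this commit.

// Hypothetical route handler sketch — field names mirror the new askClient parameters.
const { askClient } = require('./app/chatgpt-client'); // assumed path/export

async function handleAsk(req, res) {
  // The client is assumed to post the per-request settings that used to be module-level config.
  const {
    text,
    parentMessageId,
    conversationId,
    model,
    chatGptLabel,
    promptPrefix,
    temperature,
    top_p,
    presence_penalty
  } = req.body;

  const abortController = new AbortController();

  const response = await askClient({
    text,
    parentMessageId,
    conversationId,
    model,
    chatGptLabel,
    promptPrefix,
    temperature,
    top_p,
    presence_penalty,
    onProgress: (partial) => {
      // stream partial tokens back to the client here
    },
    abortController
  });

  res.json(response);
}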