mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-19 18:00:15 +01:00
feat(Google): Support all Text/Chat Models, Response streaming, PaLM -> Google 🤖 (#1316)
* feat: update PaLM icons * feat: add additional google models * POC: formatting inputs for Vertex AI streaming * refactor: move endpoints services outside of /routes dir to /services/Endpoints * refactor: shorten schemas import * refactor: rename PALM to GOOGLE * feat: make Google editable endpoint * feat: reusable Ask and Edit controllers based off Anthropic * chore: organize imports/logic * fix(parseConvo): include examples in googleSchema * fix: google only allows odd number of messages to be sent * fix: pass proxy to AnthropicClient * refactor: change `google` altName to `Google` * refactor: update getModelMaxTokens and related functions to handle maxTokensMap with nested endpoint model key/values * refactor: google Icon and response sender changes (Codey and Google logo instead of PaLM in all cases) * feat: google support for maxTokensMap * feat: google updated endpoints with Ask/Edit controllers, buildOptions, and initializeClient * feat(GoogleClient): now builds prompt for text models and supports real streaming from Vertex AI through langchain * chore(GoogleClient): remove comments, left before for reference in git history * docs: update google instructions (WIP) * docs(apis_and_tokens.md): add images to google instructions * docs: remove typo apis_and_tokens.md * Update apis_and_tokens.md * feat(Google): use default settings map, fully support context for both text and chat models, fully support examples for chat models * chore: update more PaLM references to Google * chore: move playwright out of workflows to avoid failing tests
This commit is contained in:
parent
8a1968b2f8
commit
583e978a82
90 changed files with 1613 additions and 784 deletions
15
api/server/services/Endpoints/anthropic/buildOptions.js
Normal file
15
api/server/services/Endpoints/anthropic/buildOptions.js
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
/**
 * Shapes a parsed request body into the endpoint option object consumed by
 * the Anthropic client initializer.
 *
 * @param {string} endpoint - The endpoint identifier (e.g. 'anthropic').
 * @param {Object} parsedBody - Validated request body; `modelLabel` and
 *   `promptPrefix` are lifted to the top level, every other field becomes
 *   part of `modelOptions`.
 * @returns {Object} The assembled endpoint option object.
 */
const buildOptions = (endpoint, parsedBody) => {
  const { modelLabel, promptPrefix, ...modelParams } = parsedBody;
  return {
    endpoint,
    modelLabel,
    promptPrefix,
    modelOptions: { ...modelParams },
  };
};

module.exports = buildOptions;
|
||||
8
api/server/services/Endpoints/anthropic/index.js
Normal file
8
api/server/services/Endpoints/anthropic/index.js
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
const buildOptions = require('./buildOptions');
|
||||
const initializeClient = require('./initializeClient');
|
||||
|
||||
module.exports = {
|
||||
// addTitle, // todo
|
||||
buildOptions,
|
||||
initializeClient,
|
||||
};
|
||||
38
api/server/services/Endpoints/anthropic/initializeClient.js
Normal file
38
api/server/services/Endpoints/anthropic/initializeClient.js
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
const { AnthropicClient } = require('~/app');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');

/**
 * Initializes an AnthropicClient for the current request.
 *
 * When `ANTHROPIC_API_KEY` is the literal `'user_provided'`, the key is read
 * from the user's stored credentials; otherwise the environment value is used
 * directly.
 *
 * @param {Object} params
 * @param {Object} params.req - Express request; `req.body.key` carries the
 *   user key's expiry timestamp when one was supplied.
 * @param {Object} params.res - Express response, forwarded to the client.
 * @param {Object} params.endpointOption - Options built by `buildOptions`.
 * @returns {Promise<{ client: AnthropicClient, anthropicApiKey: string }>}
 * @throws If the user-provided key has expired (via `checkUserKeyExpiry`).
 */
const initializeClient = async ({ req, res, endpointOption }) => {
  const { ANTHROPIC_API_KEY, ANTHROPIC_REVERSE_PROXY, PROXY } = process.env;
  const expiresAt = req.body.key;
  const isUserProvided = ANTHROPIC_API_KEY === 'user_provided';

  // Validate expiry BEFORE fetching the stored key. The previous order
  // fetched the key first, which both wasted a lookup for an already-expired
  // key and diverged from the google/gptPlugins/openAI initializers.
  if (expiresAt && isUserProvided) {
    checkUserKeyExpiry(
      expiresAt,
      'Your ANTHROPIC_API_KEY has expired. Please provide your API key again.',
    );
  }

  const anthropicApiKey = isUserProvided
    ? await getAnthropicUserKey(req.user.id)
    : ANTHROPIC_API_KEY;

  const client = new AnthropicClient(anthropicApiKey, {
    req,
    res,
    reverseProxyUrl: ANTHROPIC_REVERSE_PROXY ?? null,
    proxy: PROXY ?? null,
    ...endpointOption,
  });

  return {
    client,
    anthropicApiKey,
  };
};

// Fetches the user's stored Anthropic key from the UserService.
const getAnthropicUserKey = async (userId) => {
  return await getUserKey({ userId, name: 'anthropic' });
};

module.exports = initializeClient;
|
||||
16
api/server/services/Endpoints/google/buildOptions.js
Normal file
16
api/server/services/Endpoints/google/buildOptions.js
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
/**
 * Shapes a parsed request body into the endpoint option object consumed by
 * the Google client initializer.
 *
 * @param {string} endpoint - The endpoint identifier (e.g. 'google').
 * @param {Object} parsedBody - Validated request body; `examples`,
 *   `modelLabel`, and `promptPrefix` are lifted to the top level, every other
 *   field becomes part of `modelOptions`.
 * @returns {Object} The assembled endpoint option object.
 */
const buildOptions = (endpoint, parsedBody) => {
  const { examples, modelLabel, promptPrefix, ...modelParams } = parsedBody;
  return {
    examples,
    endpoint,
    modelLabel,
    promptPrefix,
    modelOptions: { ...modelParams },
  };
};

module.exports = buildOptions;
|
||||
8
api/server/services/Endpoints/google/index.js
Normal file
8
api/server/services/Endpoints/google/index.js
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
const buildOptions = require('./buildOptions');
|
||||
const initializeClient = require('./initializeClient');
|
||||
|
||||
module.exports = {
|
||||
// addTitle, // todo
|
||||
buildOptions,
|
||||
initializeClient,
|
||||
};
|
||||
35
api/server/services/Endpoints/google/initializeClient.js
Normal file
35
api/server/services/Endpoints/google/initializeClient.js
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
const { GoogleClient } = require('~/app');
const { EModelEndpoint } = require('~/server/services/Endpoints');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');

/**
 * Initializes a GoogleClient for the current request.
 *
 * Credential resolution: when `GOOGLE_KEY` is the literal `'user_provided'`,
 * the user's stored key is used (after its expiry timestamp in `req.body.key`
 * is validated); otherwise the service-account JSON at `~/data/auth.json` is
 * loaded via `require`.
 *
 * @param {Object} params
 * @param {Object} params.req - Express request; `req.body.key` carries the
 *   user key's expiry timestamp when one was supplied.
 * @param {Object} params.res - Express response, forwarded to the client.
 * @param {Object} params.endpointOption - Options built by `buildOptions`.
 * @returns {Promise<{ client: GoogleClient, apiKey: * }>}
 * @throws If the user-provided key has expired (via `checkUserKeyExpiry`),
 *   or if `~/data/auth.json` is missing when the env key is not user-provided.
 */
const initializeClient = async ({ req, res, endpointOption }) => {
  const { GOOGLE_KEY, GOOGLE_REVERSE_PROXY, PROXY } = process.env;
  const isUserProvided = GOOGLE_KEY === 'user_provided';
  const { key: expiresAt } = req.body;

  // The stored key is fetched only when an expiry timestamp was supplied.
  // NOTE(review): if GOOGLE_KEY is 'user_provided' but req.body.key is
  // absent, apiKey stays null below and no error is raised here — unlike the
  // openAI initializer, which throws 'API key not provided.'; confirm
  // GoogleClient tolerates a null key.
  let userKey = null;
  if (expiresAt && isUserProvided) {
    checkUserKeyExpiry(
      expiresAt,
      'Your Google key has expired. Please provide your JSON credentials again.',
    );
    userKey = await getUserKey({ userId: req.user.id, name: EModelEndpoint.google });
  }

  // `require` caches the JSON, so the credentials file is read at most once
  // per process.
  const apiKey = isUserProvided ? userKey : require('~/data/auth.json');

  const client = new GoogleClient(apiKey, {
    req,
    res,
    reverseProxyUrl: GOOGLE_REVERSE_PROXY ?? null,
    proxy: PROXY ?? null,
    ...endpointOption,
  });

  return {
    client,
    apiKey,
  };
};

module.exports = initializeClient;
|
||||
31
api/server/services/Endpoints/gptPlugins/buildOptions.js
Normal file
31
api/server/services/Endpoints/gptPlugins/buildOptions.js
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
/**
 * Shapes a parsed request body into the endpoint option object consumed by
 * the Plugins client initializer.
 *
 * @param {string} endpoint - The endpoint identifier (e.g. 'gptPlugins').
 * @param {Object} parsedBody - Validated request body. `tools` is reduced to
 *   an array of plugin keys; selected model parameters are gathered under
 *   `modelOptions`.
 * @returns {Object} The assembled endpoint option object.
 */
const buildOptions = (endpoint, parsedBody) => {
  const {
    chatGptLabel,
    promptPrefix,
    agentOptions,
    tools,
    model,
    temperature,
    top_p,
    presence_penalty,
    frequency_penalty,
  } = parsedBody;
  const endpointOption = {
    endpoint,
    // Guard against a missing `tools` array. The previous expression,
    // `tools.map(...) ?? []`, threw a TypeError when `tools` was undefined:
    // `.map` never returns a nullish value, so the `??` fallback was dead code.
    tools: (tools ?? []).map((tool) => tool.pluginKey),
    chatGptLabel,
    promptPrefix,
    agentOptions,
    modelOptions: {
      model,
      temperature,
      top_p,
      presence_penalty,
      frequency_penalty,
    },
  };

  return endpointOption;
};

module.exports = buildOptions;
|
||||
7
api/server/services/Endpoints/gptPlugins/index.js
Normal file
7
api/server/services/Endpoints/gptPlugins/index.js
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
const buildOptions = require('./buildOptions');
|
||||
const initializeClient = require('./initializeClient');
|
||||
|
||||
module.exports = {
|
||||
buildOptions,
|
||||
initializeClient,
|
||||
};
|
||||
65
api/server/services/Endpoints/gptPlugins/initializeClient.js
Normal file
65
api/server/services/Endpoints/gptPlugins/initializeClient.js
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
const { PluginsClient } = require('~/app');
const { isEnabled } = require('~/server/utils');
const { getAzureCredentials } = require('~/utils');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');

/**
 * Initializes a PluginsClient for the current request, resolving either an
 * OpenAI or Azure OpenAI credential depending on `PLUGINS_USE_AZURE`.
 *
 * @param {Object} params
 * @param {Object} params.req - Express request; `req.body.key` carries the
 *   user key's expiry timestamp when one was supplied.
 * @param {Object} params.res - Express response, forwarded to the client.
 * @param {Object} params.endpointOption - Options built by `buildOptions`.
 * @returns {Promise<{ client: PluginsClient, azure: Object|undefined, openAIApiKey: string }>}
 * @throws If the user key has expired, if no API key can be resolved, or if a
 *   user-provided Azure credential is not valid JSON (JSON.parse is allowed
 *   to surface its SyntaxError — the spec file relies on this).
 */
const initializeClient = async ({ req, res, endpointOption }) => {
  const {
    PROXY,
    OPENAI_API_KEY,
    AZURE_API_KEY,
    PLUGINS_USE_AZURE,
    OPENAI_REVERSE_PROXY,
    OPENAI_SUMMARIZE,
    DEBUG_PLUGINS,
  } = process.env;
  const { key: expiresAt } = req.body;
  // 'summarize' switches the client's context management strategy.
  const contextStrategy = isEnabled(OPENAI_SUMMARIZE) ? 'summarize' : null;
  const clientOptions = {
    contextStrategy,
    debug: isEnabled(DEBUG_PLUGINS),
    reverseProxyUrl: OPENAI_REVERSE_PROXY ?? null,
    proxy: PROXY ?? null,
    req,
    res,
    ...endpointOption,
  };

  const useAzure = isEnabled(PLUGINS_USE_AZURE);

  // 'user_provided' is a sentinel meaning the key lives in the user's stored
  // credentials rather than the environment.
  const isUserProvided = useAzure
    ? AZURE_API_KEY === 'user_provided'
    : OPENAI_API_KEY === 'user_provided';

  // The stored key is fetched only when an expiry timestamp accompanies the
  // request; expiry is validated first so an expired key is never fetched.
  let userKey = null;
  if (expiresAt && isUserProvided) {
    checkUserKeyExpiry(
      expiresAt,
      'Your OpenAI API key has expired. Please provide your API key again.',
    );
    userKey = await getUserKey({
      userId: req.user.id,
      name: useAzure ? 'azureOpenAI' : 'openAI',
    });
  }

  let apiKey = isUserProvided ? userKey : OPENAI_API_KEY;

  // Azure is used either when explicitly enabled, or heuristically when the
  // resolved key string contains 'azure' and no azure config is set yet.
  // NOTE(review): user-provided Azure keys are expected to be a JSON blob of
  // credentials; JSON.parse(null) yields null, so a missing userKey here
  // would make the `.azureOpenAIApiKey` read below throw a TypeError —
  // confirm callers always supply req.body.key in the Azure user-key flow.
  if (useAzure || (apiKey && apiKey.includes('azure') && !clientOptions.azure)) {
    clientOptions.azure = isUserProvided ? JSON.parse(userKey) : getAzureCredentials();
    apiKey = clientOptions.azure.azureOpenAIApiKey;
  }

  if (!apiKey) {
    throw new Error('API key not provided.');
  }

  const client = new PluginsClient(apiKey, clientOptions);
  return {
    client,
    azure: clientOptions.azure,
    openAIApiKey: apiKey,
  };
};

module.exports = initializeClient;
|
||||
|
|
@ -0,0 +1,218 @@
|
|||
// gptPlugins/initializeClient.spec.js
// Jest spec covering credential resolution, Azure switching, option wiring,
// and failure modes of the gptPlugins initializeClient.
const { PluginsClient } = require('~/app');
const initializeClient = require('./initializeClient');
// NOTE(review): imported via a relative path while the mock below targets the
// '~/server/services/UserService' alias — jest keys mocks by resolved module
// path, so this only works if both specifiers resolve to the same file;
// confirm against the project's moduleNameMapper config.
const { getUserKey } = require('../../UserService');

// Mock getUserKey since it's the only function we want to mock
jest.mock('~/server/services/UserService', () => ({
  getUserKey: jest.fn(),
  checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
}));

describe('gptPlugins/initializeClient', () => {
  // Set up environment variables
  const originalEnvironment = process.env;

  beforeEach(() => {
    jest.resetModules(); // Clears the cache
    process.env = { ...originalEnvironment }; // Make a copy
  });

  afterAll(() => {
    process.env = originalEnvironment; // Restore original env vars
  });

  test('should initialize PluginsClient with OpenAI API key and default options', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.PLUGINS_USE_AZURE = 'false';
    process.env.DEBUG_PLUGINS = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const req = {
      body: { key: null },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client, openAIApiKey } = await initializeClient({ req, res, endpointOption });

    expect(openAIApiKey).toBe('test-openai-api-key');
    expect(client).toBeInstanceOf(PluginsClient);
  });

  test('should initialize PluginsClient with Azure credentials when PLUGINS_USE_AZURE is true', async () => {
    process.env.AZURE_API_KEY = 'test-azure-api-key';
    // The comma-chained parenthesized assignments below are kept as written;
    // each still assigns its env var before initializeClient runs.
    (process.env.AZURE_OPENAI_API_INSTANCE_NAME = 'some-value'),
      (process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME = 'some-value'),
      (process.env.AZURE_OPENAI_API_VERSION = 'some-value'),
      (process.env.AZURE_OPENAI_API_COMPLETIONS_DEPLOYMENT_NAME = 'some-value'),
      (process.env.AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME = 'some-value'),
      (process.env.PLUGINS_USE_AZURE = 'true');
    process.env.DEBUG_PLUGINS = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const req = {
      body: { key: null },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'test-model' } };

    const { client, azure } = await initializeClient({ req, res, endpointOption });

    expect(azure.azureOpenAIApiKey).toBe('test-azure-api-key');
    expect(client).toBeInstanceOf(PluginsClient);
  });

  test('should use the debug option when DEBUG_PLUGINS is enabled', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.DEBUG_PLUGINS = 'true';

    const req = {
      body: { key: null },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client } = await initializeClient({ req, res, endpointOption });

    expect(client.options.debug).toBe(true);
  });

  test('should set contextStrategy to summarize when OPENAI_SUMMARIZE is enabled', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.OPENAI_SUMMARIZE = 'true';

    const req = {
      body: { key: null },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client } = await initializeClient({ req, res, endpointOption });

    expect(client.options.contextStrategy).toBe('summarize');
  });

  // ... additional tests for reverseProxyUrl, proxy, user-provided keys, etc.

  test('should throw an error if no API keys are provided in the environment', async () => {
    // Clear the environment variables for API keys
    delete process.env.OPENAI_API_KEY;
    delete process.env.AZURE_API_KEY;

    const req = {
      body: { key: null },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      'API key not provided.',
    );
  });

  // Additional tests for gptPlugins/initializeClient.spec.js

  // ... (previous test setup code)

  test('should handle user-provided OpenAI keys and check expiry', async () => {
    process.env.OPENAI_API_KEY = 'user_provided';
    process.env.PLUGINS_USE_AZURE = 'false';

    // A future expiry so checkUserKeyExpiry passes and the mocked key is used.
    const futureDate = new Date(Date.now() + 10000).toISOString();
    const req = {
      body: { key: futureDate },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    getUserKey.mockResolvedValue('test-user-provided-openai-api-key');

    const { openAIApiKey } = await initializeClient({ req, res, endpointOption });

    expect(openAIApiKey).toBe('test-user-provided-openai-api-key');
  });

  test('should handle user-provided Azure keys and check expiry', async () => {
    process.env.AZURE_API_KEY = 'user_provided';
    process.env.PLUGINS_USE_AZURE = 'true';

    const futureDate = new Date(Date.now() + 10000).toISOString();
    const req = {
      body: { key: futureDate },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'test-model' } };

    // User-provided Azure credentials are stored as a JSON blob.
    getUserKey.mockResolvedValue(
      JSON.stringify({
        azureOpenAIApiKey: 'test-user-provided-azure-api-key',
        azureOpenAIApiDeploymentName: 'test-deployment',
      }),
    );

    const { azure } = await initializeClient({ req, res, endpointOption });

    expect(azure.azureOpenAIApiKey).toBe('test-user-provided-azure-api-key');
  });

  test('should throw an error if the user-provided key has expired', async () => {
    process.env.OPENAI_API_KEY = 'user_provided';
    // 'FALSE' (any case) is not treated as enabled by isEnabled.
    process.env.PLUGINS_USE_AZURE = 'FALSE';
    const expiresAt = new Date(Date.now() - 10000).toISOString(); // Expired
    const req = {
      body: { key: expiresAt },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /Your OpenAI API key has expired/,
    );
  });

  test('should throw an error if the user-provided Azure key is invalid JSON', async () => {
    process.env.AZURE_API_KEY = 'user_provided';
    process.env.PLUGINS_USE_AZURE = 'true';

    const req = {
      body: { key: new Date(Date.now() + 10000).toISOString() },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    // Simulate an invalid JSON string returned from getUserKey
    getUserKey.mockResolvedValue('invalid-json');

    // JSON.parse's SyntaxError is expected to propagate out of the initializer.
    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /Unexpected token/,
    );
  });

  test('should correctly handle the presence of a reverse proxy', async () => {
    process.env.OPENAI_REVERSE_PROXY = 'http://reverse.proxy';
    process.env.PROXY = 'http://proxy';
    process.env.OPENAI_API_KEY = 'test-openai-api-key';

    const req = {
      body: { key: null },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client } = await initializeClient({ req, res, endpointOption });

    expect(client.options.reverseProxyUrl).toBe('http://reverse.proxy');
    expect(client.options.proxy).toBe('http://proxy');
  });
});
|
||||
5
api/server/services/Endpoints/index.js
Normal file
5
api/server/services/Endpoints/index.js
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
const schemas = require('./schemas');
|
||||
|
||||
module.exports = {
|
||||
...schemas,
|
||||
};
|
||||
22
api/server/services/Endpoints/openAI/addTitle.js
Normal file
22
api/server/services/Endpoints/openAI/addTitle.js
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
const { saveConvo } = require('~/models');
const { isEnabled } = require('~/server/utils');

/**
 * Generates a title for the conversation and persists it.
 *
 * Skips title generation when the TITLE_CONVO env toggle is off (defaults to
 * 'true') or when the request was aborted before completion.
 *
 * @param {Object} req - Express request; `req.user.id` scopes the save.
 * @param {Object} options
 * @param {string} options.text - The user's message text.
 * @param {Object} options.response - The assistant response (text + conversationId).
 * @param {Object} options.client - Client exposing `titleConvo` and `abortController`.
 * @returns {Promise<void>}
 */
const addTitle = async (req, { text, response, client }) => {
  const { TITLE_CONVO = 'true' } = process.env ?? {};

  // Bail out when titling is disabled or the request was aborted.
  if (!isEnabled(TITLE_CONVO) || client.abortController.signal.aborted) {
    return;
  }

  const title = await client.titleConvo({ text, responseText: response?.text });

  await saveConvo(req.user.id, {
    conversationId: response.conversationId,
    title,
  });
};

module.exports = addTitle;
|
||||
15
api/server/services/Endpoints/openAI/buildOptions.js
Normal file
15
api/server/services/Endpoints/openAI/buildOptions.js
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
/**
 * Shapes a parsed request body into the endpoint option object consumed by
 * the OpenAI client initializer.
 *
 * @param {string} endpoint - The endpoint identifier (e.g. 'openAI', 'azureOpenAI').
 * @param {Object} parsedBody - Validated request body; `chatGptLabel` and
 *   `promptPrefix` are lifted to the top level, every other field becomes
 *   part of `modelOptions`.
 * @returns {Object} The assembled endpoint option object.
 */
const buildOptions = (endpoint, parsedBody) => {
  const { chatGptLabel, promptPrefix, ...modelParams } = parsedBody;
  return {
    endpoint,
    chatGptLabel,
    promptPrefix,
    modelOptions: { ...modelParams },
  };
};

module.exports = buildOptions;
|
||||
9
api/server/services/Endpoints/openAI/index.js
Normal file
9
api/server/services/Endpoints/openAI/index.js
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
const addTitle = require('./addTitle');
|
||||
const buildOptions = require('./buildOptions');
|
||||
const initializeClient = require('./initializeClient');
|
||||
|
||||
module.exports = {
|
||||
addTitle,
|
||||
buildOptions,
|
||||
initializeClient,
|
||||
};
|
||||
61
api/server/services/Endpoints/openAI/initializeClient.js
Normal file
61
api/server/services/Endpoints/openAI/initializeClient.js
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
const { OpenAIClient } = require('~/app');
const { isEnabled } = require('~/server/utils');
const { getAzureCredentials } = require('~/utils');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');

/**
 * Initializes an OpenAIClient for the current request, selecting between the
 * 'openAI' and 'azureOpenAI' credential based on `req.body.endpoint`.
 *
 * @param {Object} params
 * @param {Object} params.req - Express request; `req.body.key` carries the
 *   user key's expiry timestamp, `req.body.endpoint` selects the credential.
 * @param {Object} params.res - Express response, forwarded to the client.
 * @param {Object} params.endpointOption - Options built by `buildOptions`.
 * @returns {Promise<{ client: OpenAIClient, openAIApiKey: string }>}
 * @throws If the user key has expired or no API key can be resolved.
 */
const initializeClient = async ({ req, res, endpointOption }) => {
  const {
    PROXY,
    OPENAI_API_KEY,
    AZURE_API_KEY,
    OPENAI_REVERSE_PROXY,
    OPENAI_SUMMARIZE,
    DEBUG_OPENAI,
  } = process.env;
  const { key: expiresAt, endpoint } = req.body;
  // 'summarize' switches the client's context management strategy.
  const contextStrategy = isEnabled(OPENAI_SUMMARIZE) ? 'summarize' : null;
  const clientOptions = {
    debug: isEnabled(DEBUG_OPENAI),
    contextStrategy,
    reverseProxyUrl: OPENAI_REVERSE_PROXY ?? null,
    proxy: PROXY ?? null,
    req,
    res,
    ...endpointOption,
  };

  // Maps the request's endpoint name to its environment credential.
  // NOTE(review): an unrecognized endpoint value yields undefined here and
  // falls through to the 'API key not provided.' error below.
  const credentials = {
    openAI: OPENAI_API_KEY,
    azureOpenAI: AZURE_API_KEY,
  };

  // 'user_provided' is a sentinel meaning the key lives in the user's stored
  // credentials rather than the environment.
  const isUserProvided = credentials[endpoint] === 'user_provided';

  // Expiry is validated before the stored key is fetched; without an expiry
  // timestamp the stored key is never looked up.
  let userKey = null;
  if (expiresAt && isUserProvided) {
    checkUserKeyExpiry(
      expiresAt,
      'Your OpenAI API key has expired. Please provide your API key again.',
    );
    userKey = await getUserKey({ userId: req.user.id, name: endpoint });
  }

  let apiKey = isUserProvided ? userKey : credentials[endpoint];

  // NOTE(review): user-provided Azure keys are expected to be a JSON blob of
  // credentials; JSON.parse(null) yields null, so a missing userKey would
  // make the `.azureOpenAIApiKey` read throw a TypeError — confirm callers
  // always supply req.body.key in the Azure user-key flow.
  if (endpoint === 'azureOpenAI') {
    clientOptions.azure = isUserProvided ? JSON.parse(userKey) : getAzureCredentials();
    apiKey = clientOptions.azure.azureOpenAIApiKey;
  }

  if (!apiKey) {
    throw new Error('API key not provided.');
  }

  const client = new OpenAIClient(apiKey, clientOptions);
  return {
    client,
    openAIApiKey: apiKey,
  };
};

module.exports = initializeClient;
|
||||
199
api/server/services/Endpoints/openAI/initializeClient.spec.js
Normal file
199
api/server/services/Endpoints/openAI/initializeClient.spec.js
Normal file
|
|
@ -0,0 +1,199 @@
|
|||
// openAI/initializeClient.spec.js
// Jest spec covering credential selection by endpoint, option wiring, and
// failure modes of the openAI initializeClient.
const { OpenAIClient } = require('~/app');
const initializeClient = require('./initializeClient');
const { getUserKey } = require('~/server/services/UserService');

// Mock getUserKey since it's the only function we want to mock
jest.mock('~/server/services/UserService', () => ({
  getUserKey: jest.fn(),
  checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
}));

describe('initializeClient', () => {
  // Set up environment variables
  const originalEnvironment = process.env;

  beforeEach(() => {
    jest.resetModules(); // Clears the cache
    process.env = { ...originalEnvironment }; // Make a copy
  });

  afterAll(() => {
    process.env = originalEnvironment; // Restore original env vars
  });

  test('should initialize client with OpenAI API key and default options', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.DEBUG_OPENAI = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const req = {
      body: { key: null, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    const client = await initializeClient({ req, res, endpointOption });

    expect(client.openAIApiKey).toBe('test-openai-api-key');
    expect(client.client).toBeInstanceOf(OpenAIClient);
  });

  test('should initialize client with Azure credentials when endpoint is azureOpenAI', async () => {
    process.env.AZURE_API_KEY = 'test-azure-api-key';
    // The comma-chained parenthesized assignments below are kept as written;
    // each still assigns its env var before initializeClient runs.
    (process.env.AZURE_OPENAI_API_INSTANCE_NAME = 'some-value'),
      (process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME = 'some-value'),
      (process.env.AZURE_OPENAI_API_VERSION = 'some-value'),
      (process.env.AZURE_OPENAI_API_COMPLETIONS_DEPLOYMENT_NAME = 'some-value'),
      (process.env.AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME = 'some-value'),
      (process.env.OPENAI_API_KEY = 'test-openai-api-key');
    process.env.DEBUG_OPENAI = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const req = {
      body: { key: null, endpoint: 'azureOpenAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'test-model' } };

    const client = await initializeClient({ req, res, endpointOption });

    expect(client.openAIApiKey).toBe('test-azure-api-key');
    expect(client.client).toBeInstanceOf(OpenAIClient);
  });

  test('should use the debug option when DEBUG_OPENAI is enabled', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.DEBUG_OPENAI = 'true';

    const req = {
      body: { key: null, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    const client = await initializeClient({ req, res, endpointOption });

    expect(client.client.options.debug).toBe(true);
  });

  test('should set contextStrategy to summarize when OPENAI_SUMMARIZE is enabled', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.OPENAI_SUMMARIZE = 'true';

    const req = {
      body: { key: null, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    const client = await initializeClient({ req, res, endpointOption });

    expect(client.client.options.contextStrategy).toBe('summarize');
  });

  test('should set reverseProxyUrl and proxy when they are provided in the environment', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.OPENAI_REVERSE_PROXY = 'http://reverse.proxy';
    process.env.PROXY = 'http://proxy';

    const req = {
      body: { key: null, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    const client = await initializeClient({ req, res, endpointOption });

    expect(client.client.options.reverseProxyUrl).toBe('http://reverse.proxy');
    expect(client.client.options.proxy).toBe('http://proxy');
  });

  test('should throw an error if the user-provided key has expired', async () => {
    process.env.OPENAI_API_KEY = 'user_provided';
    process.env.AZURE_API_KEY = 'user_provided';
    process.env.DEBUG_OPENAI = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const expiresAt = new Date(Date.now() - 10000).toISOString(); // Expired
    const req = {
      body: { key: expiresAt, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      'Your OpenAI API key has expired. Please provide your API key again.',
    );
  });

  test('should throw an error if no API keys are provided in the environment', async () => {
    // Clear the environment variables for API keys
    delete process.env.OPENAI_API_KEY;
    delete process.env.AZURE_API_KEY;

    const req = {
      body: { key: null, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      'API key not provided.',
    );
  });

  it('should handle user-provided keys and check expiry', async () => {
    // Set up the req.body to simulate user-provided key scenario
    const req = {
      body: {
        key: new Date(Date.now() + 10000).toISOString(),
        endpoint: 'openAI',
      },
      user: {
        id: '123',
      },
    };

    const res = {};
    const endpointOption = {};

    // Ensure the environment variable is set to 'user_provided' to match the isUserProvided condition
    process.env.OPENAI_API_KEY = 'user_provided';

    // Mock getUserKey to return the expected key
    getUserKey.mockResolvedValue('test-user-provided-openai-api-key');

    // Call the initializeClient function
    const result = await initializeClient({ req, res, endpointOption });

    // Assertions
    expect(result.openAIApiKey).toBe('test-user-provided-openai-api-key');
  });

  test('should throw an error if the user-provided key is invalid', async () => {
    // An expired timestamp doubles as the "invalid" key here.
    const invalidKey = new Date(Date.now() - 100000).toISOString();
    const req = {
      body: { key: invalidKey, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    // Ensure the environment variable is set to 'user_provided' to match the isUserProvided condition
    process.env.OPENAI_API_KEY = 'user_provided';

    // Mock getUserKey to return an invalid key
    getUserKey.mockResolvedValue(invalidKey);

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /Your OpenAI API key has expired/,
    );
  });
});
|
||||
445
api/server/services/Endpoints/schemas.js
Normal file
445
api/server/services/Endpoints/schemas.js
Normal file
|
|
@ -0,0 +1,445 @@
|
|||
const { z } = require('zod');
|
||||
|
||||
// Enumeration of supported endpoint identifiers; values double as the keys
// used in request bodies and credential lookups.
const EModelEndpoint = {
  azureOpenAI: 'azureOpenAI',
  openAI: 'openAI',
  bingAI: 'bingAI',
  chatGPTBrowser: 'chatGPTBrowser',
  google: 'google',
  gptPlugins: 'gptPlugins',
  anthropic: 'anthropic',
  assistant: 'assistant',
};

// Human-readable display names for each endpoint.
const alternateName = {
  [EModelEndpoint.openAI]: 'OpenAI',
  [EModelEndpoint.assistant]: 'Assistants',
  [EModelEndpoint.azureOpenAI]: 'Azure OpenAI',
  [EModelEndpoint.bingAI]: 'Bing',
  [EModelEndpoint.chatGPTBrowser]: 'ChatGPT',
  [EModelEndpoint.gptPlugins]: 'Plugins',
  [EModelEndpoint.google]: 'Google',
  [EModelEndpoint.anthropic]: 'Anthropic',
};

// Per-endpoint default parameter ranges (min/max/step/default) used to drive
// settings UI and defaults. Only Google is configured here.
const endpointSettings = {
  [EModelEndpoint.google]: {
    model: {
      default: 'chat-bison',
    },
    maxOutputTokens: {
      min: 1,
      max: 2048,
      step: 1,
      default: 1024,
    },
    temperature: {
      min: 0,
      max: 1,
      step: 0.01,
      default: 0.2,
    },
    topP: {
      min: 0,
      max: 1,
      step: 0.01,
      default: 0.8,
    },
    // NOTE(review): topK step of 0.01 on an integer-valued 1–40 range looks
    // copied from topP — confirm whether step should be 1.
    topK: {
      min: 1,
      max: 40,
      step: 0.01,
      default: 40,
    },
  },
};

// Convenience alias for the Google settings block.
const google = endpointSettings[EModelEndpoint.google];

// Endpoints that accept file uploads.
const supportsFiles = {
  [EModelEndpoint.openAI]: true,
  [EModelEndpoint.assistant]: true,
};
|
||||
|
||||
const openAIModels = [
|
||||
'gpt-3.5-turbo-16k-0613',
|
||||
'gpt-3.5-turbo-16k',
|
||||
'gpt-4-1106-preview',
|
||||
'gpt-3.5-turbo',
|
||||
'gpt-3.5-turbo-1106',
|
||||
'gpt-4-vision-preview',
|
||||
'gpt-4',
|
||||
'gpt-3.5-turbo-instruct-0914',
|
||||
'gpt-3.5-turbo-0613',
|
||||
'gpt-3.5-turbo-0301',
|
||||
'gpt-3.5-turbo-instruct',
|
||||
'gpt-4-0613',
|
||||
'text-davinci-003',
|
||||
'gpt-4-0314',
|
||||
];
|
||||
|
||||
// Model-name prefixes/ids treated as vision-capable.
const visionModels = ['gpt-4-vision', 'llava-13b'];

// Zod validator accepting any value of the EModelEndpoint enum.
const eModelEndpointSchema = z.nativeEnum(EModelEndpoint);
|
||||
|
||||
// Shape of a single auth field a plugin requires from the user.
const tPluginAuthConfigSchema = z.object({
  authField: z.string(),
  label: z.string(),
  description: z.string(),
});
|
||||
|
||||
// Shape of a plugin/tool definition attached to a conversation.
const tPluginSchema = z.object({
  name: z.string(),
  pluginKey: z.string(),
  description: z.string(),
  icon: z.string(),
  authConfig: z.array(tPluginAuthConfigSchema),
  authenticated: z.boolean().optional(),
  isButton: z.boolean().optional(),
});
|
||||
|
||||
// A single input/output example pair (used by Google chat models).
const tExampleSchema = z.object({
  input: z.object({
    content: z.string(),
  }),
  output: z.object({
    content: z.string(),
  }),
});
|
||||
|
||||
// Options for the plugins agent (see gptPluginsSchema defaults below).
const tAgentOptionsSchema = z.object({
  agent: z.string(),
  skipCompletion: z.boolean(),
  model: z.string(),
  temperature: z.number(),
});
|
||||
|
||||
/**
 * Superset schema for a conversation record: every field any endpoint may
 * use. Endpoint-specific schemas below `.pick()` their relevant subset.
 */
const tConversationSchema = z.object({
  // Core identity/metadata.
  conversationId: z.string().nullable(),
  title: z.string().nullable().or(z.literal('New Chat')).default('New Chat'),
  user: z.string().optional(),
  endpoint: eModelEndpointSchema.nullable(),
  suggestions: z.array(z.string()).optional(),
  messages: z.array(z.string()).optional(),
  tools: z.array(tPluginSchema).optional(),
  createdAt: z.string(),
  updatedAt: z.string(),
  // Prompt/labeling options.
  systemMessage: z.string().nullable().optional(),
  modelLabel: z.string().nullable().optional(),
  examples: z.array(tExampleSchema).optional(),
  chatGptLabel: z.string().nullable().optional(),
  userLabel: z.string().optional(),
  model: z.string().nullable().optional(),
  promptPrefix: z.string().nullable().optional(),
  // Sampling parameters (camelCase used by Google/Anthropic; snake_case by OpenAI).
  temperature: z.number().optional(),
  topP: z.number().optional(),
  topK: z.number().optional(),
  context: z.string().nullable().optional(),
  top_p: z.number().optional(),
  frequency_penalty: z.number().optional(),
  presence_penalty: z.number().optional(),
  // Bing-specific session fields.
  jailbreak: z.boolean().optional(),
  jailbreakConversationId: z.string().nullable().optional(),
  conversationSignature: z.string().nullable().optional(),
  parentMessageId: z.string().optional(),
  clientId: z.string().nullable().optional(),
  invocationId: z.number().nullable().optional(),
  toneStyle: z.string().nullable().optional(),
  maxOutputTokens: z.number().optional(),
  agentOptions: tAgentOptionsSchema.nullable().optional(),
});
|
||||
|
||||
/**
 * Normalizer for OpenAI/Azure OpenAI conversation options: picks the
 * relevant fields, fills defaults for missing ones, and — via `.catch` —
 * falls back to the full default set if validation throws.
 */
const openAISchema = tConversationSchema
  .pick({
    model: true,
    chatGptLabel: true,
    promptPrefix: true,
    temperature: true,
    top_p: true,
    presence_penalty: true,
    frequency_penalty: true,
  })
  .transform((obj) => ({
    ...obj,
    model: obj.model ?? 'gpt-3.5-turbo',
    chatGptLabel: obj.chatGptLabel ?? null,
    promptPrefix: obj.promptPrefix ?? null,
    temperature: obj.temperature ?? 1,
    top_p: obj.top_p ?? 1,
    presence_penalty: obj.presence_penalty ?? 0,
    frequency_penalty: obj.frequency_penalty ?? 0,
  }))
  // Safety net: any parse error yields the defaults instead of throwing.
  .catch(() => ({
    model: 'gpt-3.5-turbo',
    chatGptLabel: null,
    promptPrefix: null,
    temperature: 1,
    top_p: 1,
    presence_penalty: 0,
    frequency_penalty: 0,
  }));
|
||||
|
||||
/**
 * Normalizer for Google (Vertex AI) conversation options.
 * Defaults come from `endpointSettings` via the `google` alias;
 * `examples` defaults to a single empty input/output pair.
 * `.catch` falls back to the full default set on any parse error.
 */
const googleSchema = tConversationSchema
  .pick({
    model: true,
    modelLabel: true,
    promptPrefix: true,
    examples: true,
    temperature: true,
    maxOutputTokens: true,
    topP: true,
    topK: true,
  })
  .transform((obj) => ({
    ...obj,
    model: obj.model ?? google.model.default,
    modelLabel: obj.modelLabel ?? null,
    promptPrefix: obj.promptPrefix ?? null,
    examples: obj.examples ?? [{ input: { content: '' }, output: { content: '' } }],
    temperature: obj.temperature ?? google.temperature.default,
    maxOutputTokens: obj.maxOutputTokens ?? google.maxOutputTokens.default,
    topP: obj.topP ?? google.topP.default,
    topK: obj.topK ?? google.topK.default,
  }))
  .catch(() => ({
    model: google.model.default,
    modelLabel: null,
    promptPrefix: null,
    examples: [{ input: { content: '' }, output: { content: '' } }],
    temperature: google.temperature.default,
    maxOutputTokens: google.maxOutputTokens.default,
    topP: google.topP.default,
    topK: google.topK.default,
  }));
|
||||
|
||||
/**
 * Normalizer for Bing AI conversation options.
 * Bing has no selectable model, so `model` is forced to ''.
 * `.catch` falls back to the full default set on any parse error.
 */
const bingAISchema = tConversationSchema
  .pick({
    jailbreak: true,
    systemMessage: true,
    context: true,
    toneStyle: true,
    jailbreakConversationId: true,
    conversationSignature: true,
    clientId: true,
    invocationId: true,
  })
  .transform((obj) => ({
    ...obj,
    model: '',
    jailbreak: obj.jailbreak ?? false,
    systemMessage: obj.systemMessage ?? null,
    context: obj.context ?? null,
    toneStyle: obj.toneStyle ?? 'creative',
    jailbreakConversationId: obj.jailbreakConversationId ?? null,
    conversationSignature: obj.conversationSignature ?? null,
    clientId: obj.clientId ?? null,
    invocationId: obj.invocationId ?? 1,
  }))
  .catch(() => ({
    model: '',
    jailbreak: false,
    systemMessage: null,
    context: null,
    toneStyle: 'creative',
    jailbreakConversationId: null,
    conversationSignature: null,
    clientId: null,
    invocationId: 1,
  }));
|
||||
|
||||
/**
 * Normalizer for Anthropic (Claude) conversation options.
 * `.catch` falls back to the full default set on any parse error.
 */
const anthropicSchema = tConversationSchema
  .pick({
    model: true,
    modelLabel: true,
    promptPrefix: true,
    temperature: true,
    maxOutputTokens: true,
    topP: true,
    topK: true,
  })
  .transform((obj) => ({
    ...obj,
    model: obj.model ?? 'claude-1',
    modelLabel: obj.modelLabel ?? null,
    promptPrefix: obj.promptPrefix ?? null,
    temperature: obj.temperature ?? 1,
    maxOutputTokens: obj.maxOutputTokens ?? 4000,
    topP: obj.topP ?? 0.7,
    topK: obj.topK ?? 5,
  }))
  .catch(() => ({
    model: 'claude-1',
    modelLabel: null,
    promptPrefix: null,
    temperature: 1,
    maxOutputTokens: 4000,
    topP: 0.7,
    topK: 5,
  }));
|
||||
|
||||
/**
 * Normalizer for the ChatGPT-browser endpoint: only the model is
 * configurable, defaulting to the free-tier browser model id.
 */
const chatGPTBrowserSchema = tConversationSchema
  .pick({
    model: true,
  })
  .transform((obj) => ({
    ...obj,
    model: obj.model ?? 'text-davinci-002-render-sha',
  }))
  .catch(() => ({
    model: 'text-davinci-002-render-sha',
  }));
|
||||
|
||||
/**
 * Normalizer for the Plugins endpoint. In addition to the OpenAI sampling
 * options it carries `tools` (default: none) and `agentOptions`
 * (default: functions agent on gpt-3.5-turbo, temperature 0).
 * `.catch` falls back to the full default set on any parse error.
 */
const gptPluginsSchema = tConversationSchema
  .pick({
    model: true,
    chatGptLabel: true,
    promptPrefix: true,
    temperature: true,
    top_p: true,
    presence_penalty: true,
    frequency_penalty: true,
    tools: true,
    agentOptions: true,
  })
  .transform((obj) => ({
    ...obj,
    model: obj.model ?? 'gpt-3.5-turbo',
    chatGptLabel: obj.chatGptLabel ?? null,
    promptPrefix: obj.promptPrefix ?? null,
    temperature: obj.temperature ?? 0.8,
    top_p: obj.top_p ?? 1,
    presence_penalty: obj.presence_penalty ?? 0,
    frequency_penalty: obj.frequency_penalty ?? 0,
    tools: obj.tools ?? [],
    agentOptions: obj.agentOptions ?? {
      agent: 'functions',
      skipCompletion: true,
      model: 'gpt-3.5-turbo',
      temperature: 0,
    },
  }))
  .catch(() => ({
    model: 'gpt-3.5-turbo',
    chatGptLabel: null,
    promptPrefix: null,
    temperature: 0.8,
    top_p: 1,
    presence_penalty: 0,
    frequency_penalty: 0,
    tools: [],
    agentOptions: {
      agent: 'functions',
      skipCompletion: true,
      model: 'gpt-3.5-turbo',
      temperature: 0,
    },
  }));
|
||||
|
||||
/**
 * Normalizer for the Assistants endpoint: keeps only defined, non-null
 * fields; any parse error collapses to an empty object.
 *
 * NOTE(review): `assistant_id` and `thread_id` are not declared on
 * tConversationSchema above — zod `.pick` behavior with unknown keys
 * varies by version; confirm these fields exist on the base schema.
 */
const assistantSchema = tConversationSchema
  .pick({
    model: true,
    assistant_id: true,
    thread_id: true,
  })
  .transform((obj) => {
    const newObj = { ...obj };
    // Strip undefined/null entries so only meaningful values survive.
    Object.keys(newObj).forEach((key) => {
      const value = newObj[key];
      if (value === undefined || value === null) {
        delete newObj[key];
      }
    });
    return newObj;
  })
  .catch(() => ({}));
|
||||
|
||||
// Registry mapping each endpoint id to its normalizing schema
// (azureOpenAI intentionally reuses the OpenAI schema).
const endpointSchemas = {
  [EModelEndpoint.openAI]: openAISchema,
  [EModelEndpoint.assistant]: assistantSchema,
  [EModelEndpoint.azureOpenAI]: openAISchema,
  [EModelEndpoint.google]: googleSchema,
  [EModelEndpoint.bingAI]: bingAISchema,
  [EModelEndpoint.anthropic]: anthropicSchema,
  [EModelEndpoint.chatGPTBrowser]: chatGPTBrowserSchema,
  [EModelEndpoint.gptPlugins]: gptPluginsSchema,
};
|
||||
|
||||
/**
 * Returns the first truthy value in `possibleValues`, or `undefined`
 * if none are truthy (or the array is empty).
 *
 * @param {Array<*>} possibleValues - Candidate values, highest priority first.
 * @returns {*} The first truthy candidate, or `undefined`.
 */
function getFirstDefinedValue(possibleValues) {
  // Array#find returns `undefined` when nothing matches, which is exactly
  // the behavior of the previous manual loop (truthiness check, first hit wins).
  return possibleValues.find(Boolean);
}
|
||||
|
||||
/**
 * Normalizes a conversation payload through the schema registered for the
 * given endpoint, optionally overriding `model` with the first truthy
 * candidate from `possibleValues.model`.
 *
 * @param {string} endpoint - One of the EModelEndpoint values.
 * @param {object} conversation - Raw conversation payload to normalize.
 * @param {{ model?: Array<*> }} [possibleValues] - Optional override candidates.
 * @returns {object} The normalized conversation object.
 * @throws {Error} If no schema is registered for `endpoint`.
 */
const parseConvo = (endpoint, conversation, possibleValues) => {
  const schema = endpointSchemas[endpoint];

  if (!schema) {
    throw new Error(`Unknown endpoint: ${endpoint}`);
  }

  const convo = schema.parse(conversation);

  if (!possibleValues || !convo) {
    return convo;
  }

  // Prefer the first truthy candidate model; keep the parsed one otherwise.
  const overrideModel = getFirstDefinedValue(possibleValues.model);
  convo.model = overrideModel ?? convo.model;

  return convo;
};
|
||||
|
||||
/**
 * Determines the display name ("sender") shown on the AI response,
 * based on the endpoint and the user's model/label options.
 *
 * @param {object} endpointOption
 * @param {string} [endpointOption.model] - Selected model id.
 * @param {string} endpointOption.endpoint - One of the EModelEndpoint values.
 * @param {string} [endpointOption.chatGptLabel] - Custom label for OpenAI-family endpoints.
 * @param {string} [endpointOption.modelLabel] - Custom label for Anthropic/Google.
 * @param {boolean} [endpointOption.jailbreak] - Bing jailbreak mode flag.
 * @returns {string} The sender display name, or '' for unknown endpoints.
 */
const getResponseSender = (endpointOption) => {
  const { model, endpoint, chatGptLabel, modelLabel, jailbreak } = endpointOption;

  const openAIFamily = [
    EModelEndpoint.openAI,
    EModelEndpoint.azureOpenAI,
    EModelEndpoint.gptPlugins,
    EModelEndpoint.chatGPTBrowser,
  ];

  if (openAIFamily.includes(endpoint)) {
    if (chatGptLabel) {
      return chatGptLabel;
    }
    if (model && model.includes('gpt-3')) {
      return 'GPT-3.5';
    }
    if (model && model.includes('gpt-4')) {
      return 'GPT-4';
    }
    // Fall back to the endpoint's display name, then a generic label.
    return alternateName[endpoint] ?? 'ChatGPT';
  }

  switch (endpoint) {
    case EModelEndpoint.bingAI:
      return jailbreak ? 'Sydney' : 'BingAI';

    case EModelEndpoint.anthropic:
      return modelLabel ?? 'Claude';

    case EModelEndpoint.google:
      if (modelLabel) {
        return modelLabel;
      }
      // Code models get the Codey branding; everything else is PaLM2.
      return model && model.includes('code') ? 'Codey' : 'PaLM2';

    default:
      return '';
  }
};
|
||||
|
||||
// Public API of this module: conversation parsing/normalization helpers
// plus the endpoint enums, labels, capabilities, and settings tables.
module.exports = {
  parseConvo,
  getResponseSender,
  EModelEndpoint,
  supportsFiles,
  openAIModels,
  visionModels,
  alternateName,
  endpointSettings,
};
|
||||
Loading…
Add table
Add a link
Reference in a new issue