Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-19 18:00:15 +01:00)
feat(Google): Support all Text/Chat Models, Response streaming, PaLM -> Google 🤖 (#1316)
* feat: update PaLM icons
* feat: add additional google models
* POC: formatting inputs for Vertex AI streaming
* refactor: move endpoints services outside of /routes dir to /services/Endpoints
* refactor: shorten schemas import
* refactor: rename PALM to GOOGLE
* feat: make Google editable endpoint
* feat: reusable Ask and Edit controllers based off Anthropic
* chore: organize imports/logic
* fix(parseConvo): include examples in googleSchema
* fix: google only allows odd number of messages to be sent
* fix: pass proxy to AnthropicClient
* refactor: change `google` altName to `Google`
* refactor: update getModelMaxTokens and related functions to handle maxTokensMap with nested endpoint model key/values (see the sketch after this list)
* refactor: google Icon and response sender changes (Codey and Google logo instead of PaLM in all cases)
* feat: google support for maxTokensMap
* feat: google updated endpoints with Ask/Edit controllers, buildOptions, and initializeClient
* feat(GoogleClient): now builds prompt for text models and supports real streaming from Vertex AI through langchain
* chore(GoogleClient): remove comments, left before for reference in git history
* docs: update google instructions (WIP)
* docs(apis_and_tokens.md): add images to google instructions
* docs: remove typo apis_and_tokens.md
* Update apis_and_tokens.md
* feat(Google): use default settings map, fully support context for both text and chat models, fully support examples for chat models
* chore: update more PaLM references to Google
* chore: move playwright out of workflows to avoid failing tests
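One refactor above moves getModelMaxTokens to a maxTokensMap keyed by endpoint, with per-model token limits nested underneath. The snippet below is only a minimal sketch of that idea; the model names, token values, and lookup helper are placeholders for illustration, not the repository's actual tables or implementation.

// Hypothetical illustration of a nested endpoint -> model -> max-tokens structure.
// Model names and values are placeholders, not the repository's real mappings.
const maxTokensMap = {
  openAI: { 'gpt-4': 8191, 'gpt-3.5-turbo': 4095 },
  google: { 'chat-bison': 4096, 'text-bison': 1024 },
};

// Resolve a model's context limit within a given endpoint; undefined if unknown.
function getModelMaxTokens(modelName, endpoint = 'openAI') {
  return maxTokensMap[endpoint]?.[modelName];
}

getModelMaxTokens('chat-bison', 'google'); // 4096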
Parent: 8a1968b2f8
Commit: 583e978a82

90 changed files with 1613 additions and 784 deletions
api/server/services/Endpoints/openAI/addTitle.js (new file, 22 lines)
@@ -0,0 +1,22 @@
const { saveConvo } = require('~/models');
const { isEnabled } = require('~/server/utils');

const addTitle = async (req, { text, response, client }) => {
  const { TITLE_CONVO = 'true' } = process.env ?? {};
  if (!isEnabled(TITLE_CONVO)) {
    return;
  }

  // If the request was aborted, don't generate the title.
  if (client.abortController.signal.aborted) {
    return;
  }

  const title = await client.titleConvo({ text, responseText: response?.text });
  await saveConvo(req.user.id, {
    conversationId: response.conversationId,
    title,
  });
};

module.exports = addTitle;
api/server/services/Endpoints/openAI/buildOptions.js (new file, 15 lines)
@@ -0,0 +1,15 @@
const buildOptions = (endpoint, parsedBody) => {
  const { chatGptLabel, promptPrefix, ...rest } = parsedBody;
  const endpointOption = {
    endpoint,
    chatGptLabel,
    promptPrefix,
    modelOptions: {
      ...rest,
    },
  };

  return endpointOption;
};

module.exports = buildOptions;
api/server/services/Endpoints/openAI/index.js (new file, 9 lines)
@@ -0,0 +1,9 @@
const addTitle = require('./addTitle');
const buildOptions = require('./buildOptions');
const initializeClient = require('./initializeClient');

module.exports = {
  addTitle,
  buildOptions,
  initializeClient,
};
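This barrel module is what the reusable Ask and Edit controllers mentioned in the commit message import from. The snippet below is an illustrative, heavily simplified sketch of how a handler could wire the three exports together; the askOpenAI name and the sendMessage call shape are assumptions for illustration, not the actual controller code, which also handles streaming, abort signals, and message persistence.

// Hypothetical, simplified request handler wiring buildOptions -> initializeClient -> addTitle.
const { buildOptions, initializeClient, addTitle } = require('~/server/services/Endpoints/openAI');

const askOpenAI = async (req, res) => {
  const { endpoint, text, ...parsedBody } = req.body;
  const endpointOption = buildOptions(endpoint, parsedBody);
  const { client } = await initializeClient({ req, res, endpointOption });

  // Send the user's message and return the model's reply.
  const response = await client.sendMessage(text, { user: req.user.id });
  res.json(response);

  // Title generation is gated by TITLE_CONVO and the abort signal inside addTitle.
  await addTitle(req, { text, response, client });
};

module.exports = askOpenAI;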
api/server/services/Endpoints/openAI/initializeClient.js (new file, 61 lines)
@@ -0,0 +1,61 @@
const { OpenAIClient } = require('~/app');
const { isEnabled } = require('~/server/utils');
const { getAzureCredentials } = require('~/utils');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');

const initializeClient = async ({ req, res, endpointOption }) => {
  const {
    PROXY,
    OPENAI_API_KEY,
    AZURE_API_KEY,
    OPENAI_REVERSE_PROXY,
    OPENAI_SUMMARIZE,
    DEBUG_OPENAI,
  } = process.env;
  const { key: expiresAt, endpoint } = req.body;
  const contextStrategy = isEnabled(OPENAI_SUMMARIZE) ? 'summarize' : null;
  const clientOptions = {
    debug: isEnabled(DEBUG_OPENAI),
    contextStrategy,
    reverseProxyUrl: OPENAI_REVERSE_PROXY ?? null,
    proxy: PROXY ?? null,
    req,
    res,
    ...endpointOption,
  };

  const credentials = {
    openAI: OPENAI_API_KEY,
    azureOpenAI: AZURE_API_KEY,
  };

  const isUserProvided = credentials[endpoint] === 'user_provided';

  let userKey = null;
  if (expiresAt && isUserProvided) {
    checkUserKeyExpiry(
      expiresAt,
      'Your OpenAI API key has expired. Please provide your API key again.',
    );
    userKey = await getUserKey({ userId: req.user.id, name: endpoint });
  }

  let apiKey = isUserProvided ? userKey : credentials[endpoint];

  if (endpoint === 'azureOpenAI') {
    clientOptions.azure = isUserProvided ? JSON.parse(userKey) : getAzureCredentials();
    apiKey = clientOptions.azure.azureOpenAIApiKey;
  }

  if (!apiKey) {
    throw new Error('API key not provided.');
  }

  const client = new OpenAIClient(apiKey, clientOptions);
  return {
    client,
    openAIApiKey: apiKey,
  };
};

module.exports = initializeClient;
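A note on the azureOpenAI branch above: when the key is user-provided, initializeClient calls JSON.parse on the stored key and assigns the result to clientOptions.azure, so the user key is expected to be a JSON string of Azure credentials rather than a bare API key. The example below is only a guess at that shape; azureOpenAIApiKey is the one field referenced in the code above, while the remaining field names mirror the AZURE_OPENAI_* environment variables and are assumptions.

// Hypothetical example of a user-provided key for the azureOpenAI endpoint.
// Only azureOpenAIApiKey is referenced by initializeClient above; other field names are assumed.
const exampleUserAzureKey = JSON.stringify({
  azureOpenAIApiKey: '<azure-api-key>',
  azureOpenAIApiInstanceName: '<instance-name>',
  azureOpenAIApiDeploymentName: '<deployment-name>',
  azureOpenAIApiVersion: '<api-version>',
});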
api/server/services/Endpoints/openAI/initializeClient.spec.js (new file, 199 lines)
@@ -0,0 +1,199 @@
const { OpenAIClient } = require('~/app');
const initializeClient = require('./initializeClient');
const { getUserKey } = require('~/server/services/UserService');

// Mock getUserKey since it's the only function we want to mock
jest.mock('~/server/services/UserService', () => ({
  getUserKey: jest.fn(),
  checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
}));

describe('initializeClient', () => {
  // Set up environment variables
  const originalEnvironment = process.env;

  beforeEach(() => {
    jest.resetModules(); // Clears the cache
    process.env = { ...originalEnvironment }; // Make a copy
  });

  afterAll(() => {
    process.env = originalEnvironment; // Restore original env vars
  });

  test('should initialize client with OpenAI API key and default options', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.DEBUG_OPENAI = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const req = {
      body: { key: null, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    const client = await initializeClient({ req, res, endpointOption });

    expect(client.openAIApiKey).toBe('test-openai-api-key');
    expect(client.client).toBeInstanceOf(OpenAIClient);
  });

  test('should initialize client with Azure credentials when endpoint is azureOpenAI', async () => {
    process.env.AZURE_API_KEY = 'test-azure-api-key';
    process.env.AZURE_OPENAI_API_INSTANCE_NAME = 'some-value';
    process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME = 'some-value';
    process.env.AZURE_OPENAI_API_VERSION = 'some-value';
    process.env.AZURE_OPENAI_API_COMPLETIONS_DEPLOYMENT_NAME = 'some-value';
    process.env.AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME = 'some-value';
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.DEBUG_OPENAI = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const req = {
      body: { key: null, endpoint: 'azureOpenAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'test-model' } };

    const client = await initializeClient({ req, res, endpointOption });

    expect(client.openAIApiKey).toBe('test-azure-api-key');
    expect(client.client).toBeInstanceOf(OpenAIClient);
  });

  test('should use the debug option when DEBUG_OPENAI is enabled', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.DEBUG_OPENAI = 'true';

    const req = {
      body: { key: null, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    const client = await initializeClient({ req, res, endpointOption });

    expect(client.client.options.debug).toBe(true);
  });

  test('should set contextStrategy to summarize when OPENAI_SUMMARIZE is enabled', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.OPENAI_SUMMARIZE = 'true';

    const req = {
      body: { key: null, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    const client = await initializeClient({ req, res, endpointOption });

    expect(client.client.options.contextStrategy).toBe('summarize');
  });

  test('should set reverseProxyUrl and proxy when they are provided in the environment', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.OPENAI_REVERSE_PROXY = 'http://reverse.proxy';
    process.env.PROXY = 'http://proxy';

    const req = {
      body: { key: null, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    const client = await initializeClient({ req, res, endpointOption });

    expect(client.client.options.reverseProxyUrl).toBe('http://reverse.proxy');
    expect(client.client.options.proxy).toBe('http://proxy');
  });

  test('should throw an error if the user-provided key has expired', async () => {
    process.env.OPENAI_API_KEY = 'user_provided';
    process.env.AZURE_API_KEY = 'user_provided';
    process.env.DEBUG_OPENAI = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const expiresAt = new Date(Date.now() - 10000).toISOString(); // Expired
    const req = {
      body: { key: expiresAt, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      'Your OpenAI API key has expired. Please provide your API key again.',
    );
  });

  test('should throw an error if no API keys are provided in the environment', async () => {
    // Clear the environment variables for API keys
    delete process.env.OPENAI_API_KEY;
    delete process.env.AZURE_API_KEY;

    const req = {
      body: { key: null, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      'API key not provided.',
    );
  });

  it('should handle user-provided keys and check expiry', async () => {
    // Set up the req.body to simulate user-provided key scenario
    const req = {
      body: {
        key: new Date(Date.now() + 10000).toISOString(),
        endpoint: 'openAI',
      },
      user: {
        id: '123',
      },
    };

    const res = {};
    const endpointOption = {};

    // Ensure the environment variable is set to 'user_provided' to match the isUserProvided condition
    process.env.OPENAI_API_KEY = 'user_provided';

    // Mock getUserKey to return the expected key
    getUserKey.mockResolvedValue('test-user-provided-openai-api-key');

    // Call the initializeClient function
    const result = await initializeClient({ req, res, endpointOption });

    // Assertions
    expect(result.openAIApiKey).toBe('test-user-provided-openai-api-key');
  });

  test('should throw an error if the user-provided key is invalid', async () => {
    const invalidKey = new Date(Date.now() - 100000).toISOString();
    const req = {
      body: { key: invalidKey, endpoint: 'openAI' },
      user: { id: '123' },
    };
    const res = {};
    const endpointOption = {};

    // Ensure the environment variable is set to 'user_provided' to match the isUserProvided condition
    process.env.OPENAI_API_KEY = 'user_provided';

    // Mock getUserKey to return an invalid key
    getUserKey.mockResolvedValue(invalidKey);

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /Your OpenAI API key has expired/,
    );
  });
});