2023-11-22 13:56:38 -05:00
|
|
|
const { EModelEndpoint } = require('~/server/routes/endpoints/schemas');
|
|
|
|
|
const { availableTools } = require('~/app/clients/tools');
|
|
|
|
|
const { addOpenAPISpecs } = require('~/app/clients/tools/util/addOpenAPISpecs');
|
2023-09-18 12:55:51 -04:00
|
|
|
const {
|
|
|
|
|
openAIApiKey,
|
|
|
|
|
azureOpenAIApiKey,
|
|
|
|
|
useAzurePlugins,
|
|
|
|
|
userProvidedOpenAI,
|
|
|
|
|
palmKey,
|
|
|
|
|
openAI,
|
feat: Vision Support + New UI (#1203)
* feat: add timer duration to showToast, show toast for preset selection
* refactor: replace old /chat/ route with /c/. e2e tests will fail here
* refactor: move typedefs to root of /api/ and add a few to assistant types in TS
* refactor: reorganize data-provider imports, fix dependency cycle, strategize new plan to separate react dependent packages
* feat: add dataService for uploading images
* feat(data-provider): add mutation keys
* feat: file resizing and upload
* WIP: initial API image handling
* fix: catch JSON.parse of localStorage tools
* chore: experimental: use module-alias for absolute imports
* refactor: change temp_file_id strategy
* fix: updating files state by using Map and defining react query callbacks in a way that keeps them during component unmount, initial delete handling
* feat: properly handle file deletion
* refactor: unexpose complete filepath and resize from server for higher fidelity
* fix: make sure resized height, width is saved, catch bad requests
* refactor: use absolute imports
* fix: prevent setOptions from being called more than once for OpenAIClient, made note to fix for PluginsClient
* refactor: import supportsFiles and models vars from schemas
* fix: correctly replace temp file id
* refactor(BaseClient): use absolute imports, pass message 'opts' to buildMessages method, count tokens for nested objects/arrays
* feat: add validateVisionModel to determine if model has vision capabilities
* chore(checkBalance): update jsdoc
* feat: formatVisionMessage: change message content format dependent on role and image_urls passed
* refactor: add usage to File schema, make create and updateFile, correctly set and remove TTL
* feat: working vision support
TODO: file size, type, amount validations, making sure they are styled right, and making sure you can add images from the clipboard/dragging
* feat: clipboard support for uploading images
* feat: handle files on drop to screen, refactor top level view code to Presentation component so the useDragHelpers hook has ChatContext
* fix(Images): replace uploaded images in place
* feat: add filepath validation to protect sensitive files
* fix: ensure correct file_ids are pushed and not the Map key values
* fix(ToastContext): type issue
* feat: add basic file validation
* fix(useDragHelpers): correct context issue with `files` dependency
* refactor: consolidate setErrors logic to setError
* feat: add dialog Image overlay on image click
* fix: close endpoints menu on click
* chore: set detail to auto, make note for configuration
* fix: react warning (button desc. of button)
* refactor: optimize filepath handling, pass file_ids to images for easier re-use
* refactor: optimize image file handling, allow re-using files in regen, pass more file metadata in messages
* feat: lazy loading images including use of upload preview
* fix: SetKeyDialog closing, stopPropagation on Dialog content click
* style(EndpointMenuItem): tighten up the style, fix dark theme showing in lightmode, make menu more ux friendly
* style: change maxheight of all settings textareas to 138px from 300px
* style: better styling for textarea and enclosing buttons
* refactor(PresetItems): swap back edit and delete icons
* feat: make textarea placeholder dynamic to endpoint
* style: show user hover buttons only on hover when message is streaming
* fix: ordered list not going past 9, fix css
* feat: add User/AI labels; style: hide loading spinner
* feat: add back custom footer, change original footer text
* feat: dynamic landing icons based on endpoint
* chore: comment out assistants route
* fix: autoScroll to newest on /c/ view
* fix: Export Conversation on new UI
* style: match message style of official more closely
* ci: fix api jest unit tests, comment out e2e tests for now as they will fail until addressed
* feat: more file validation and use blob in preview field, not filepath, to fix temp deletion
* feat: filefilter for multer
* feat: better AI labels based on custom name, model, and endpoint instead of `ChatGPT`
2023-11-21 20:12:48 -05:00
|
|
|
// assistant,
|
2023-09-18 12:55:51 -04:00
|
|
|
azureOpenAI,
|
|
|
|
|
bingAI,
|
|
|
|
|
chatGPTBrowser,
|
|
|
|
|
anthropic,
|
2023-11-22 13:56:38 -05:00
|
|
|
} = require('~/server/services/EndpointService').config;
|
2023-09-18 12:55:51 -04:00
|
|
|
|
|
|
|
|
// NOTE(review): vestigial module-level counter — its value is never read
// anywhere in this file; confirm it is unused and remove.
let i = 0;
|
|
|
|
|
/**
 * Express handler that reports which chat endpoints are enabled and how each
 * is configured (user-provided keys, plugin availability, display order).
 *
 * The response is a JSON object keyed by endpoint name; each value is that
 * endpoint's config plus an `order` index reflecting its position in the
 * enabled list. Endpoints with a falsy config are omitted entirely.
 *
 * @param {object} req - Express request (unused; ordering comes from process.env.ENDPOINTS).
 * @param {object} res - Express response; receives the serialized endpoint map.
 */
async function endpointController(req, res) {
  let key;
  let palmUser;

  try {
    // Optional legacy credentials file — its absence is expected and non-fatal.
    key = require('~/data/auth.json');
  } catch (e) {
    // Best-effort load: a missing/unreadable auth.json simply leaves `key` undefined.
  }

  if (palmKey === 'user_provided') {
    palmUser = true;
  }

  // Resolve OpenAPI-spec tools, then expose them as { pluginKey: displayName }.
  const tools = await addOpenAPISpecs(availableTools);
  const plugins = Object.fromEntries(tools.map((tool) => [tool.pluginKey, tool.name]));

  // Google (PaLM) is available when either a server-side key file or a
  // user-provided key is in play.
  const google = key || palmUser ? { userProvide: palmUser } : false;

  const gptPlugins =
    openAIApiKey || azureOpenAIApiKey
      ? {
          plugins,
          availableAgents: ['classic', 'functions'],
          userProvide: userProvidedOpenAI,
          azure: useAzurePlugins,
        }
      : false;

  // Default display order; a comma-separated ENDPOINTS env var overrides it.
  let enabledEndpoints = [
    EModelEndpoint.openAI,
    EModelEndpoint.azureOpenAI,
    EModelEndpoint.google,
    EModelEndpoint.bingAI,
    EModelEndpoint.chatGPTBrowser,
    EModelEndpoint.gptPlugins,
    EModelEndpoint.anthropic,
  ];

  const endpointsEnv = process.env.ENDPOINTS || '';
  if (endpointsEnv) {
    enabledEndpoints = endpointsEnv
      .split(',')
      .filter((endpoint) => endpoint?.trim())
      .map((endpoint) => endpoint.trim());
  }

  const endpointConfig = {
    [EModelEndpoint.openAI]: openAI,
    [EModelEndpoint.azureOpenAI]: azureOpenAI,
    [EModelEndpoint.google]: google,
    [EModelEndpoint.bingAI]: bingAI,
    [EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
    [EModelEndpoint.gptPlugins]: gptPlugins,
    [EModelEndpoint.anthropic]: anthropic,
  };

  // Keep only enabled endpoints with a truthy config, tagging each with its
  // position so the client can preserve the ordering. (`endpoint` deliberately
  // does not shadow the `key` variable above.)
  const orderedAndFilteredEndpoints = enabledEndpoints.reduce((config, endpoint, index) => {
    if (endpointConfig[endpoint]) {
      // Spreading a boolean `true` yields {}, so boolean-enabled endpoints
      // serialize as just { order } — same as the original behavior.
      config[endpoint] = { ...endpointConfig[endpoint], order: index };
    }
    return config;
  }, {});

  res.send(JSON.stringify(orderedAndFilteredEndpoints));
}
|
|
|
|
|
|
|
|
|
|
// Expose the handler for mounting on the endpoints route.
module.exports = endpointController;
|