🔗 feat: User Provided Base URL for OpenAI endpoints (#1919)

* chore: bump browserslist-db@latest

* refactor(EndpointService): simplify with `generateConfig`, use an optional `baseURL` for OpenAI-based endpoints, and use the `isUserProvided` helper wherever needed (see the sketch after this list)

* refactor(custom/initializeClient): use standardized naming for common variables

* feat: user-provided `baseURL` for OpenAI-based endpoints

* refactor(custom/initializeClient): re-order operations

* fix: `KnownEndpoints` enum definition and add `FetchTokenConfig`, bump data-provider

* refactor(custom): use a `tokenKey` that depends on userProvided conditions when caching and fetching `endpointTokenConfig`; anticipate token rates from the custom config

* refactor(custom): ensure `endpointTokenConfig` is only read from the cache if the endpoint qualifies for fetching

* fix(ci): update `initializeClient` tests for the `userProvideURL` changes

* fix(EndpointService): correct baseURL env var for assistants: `ASSISTANTS_BASE_URL`

* fix: unnecessary run cancellation on res.close() when response.run is completed

* feat(assistants): user provided URL option

* ci: update tests and add test for `assistants` endpoint

* chore: leaner condition for request closing

* chore: more descriptive error message when the user needs to provide keys again
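
A minimal sketch of how the `isUserProvided` helper and `generateConfig` named in the bullets above might fit together. Only those two names (and the `userProvideURL` flag mentioned in the CI bullet) come from the commit messages; the env var names and the returned shape are assumptions for illustration.

```js
// Hedged sketch, not the actual EndpointService code: env var names and the
// returned shape are assumed; only `isUserProvided`, `generateConfig`, and
// `userProvideURL` are named in the commit messages.
const isUserProvided = (value) => value === 'user_provided';

const generateConfig = (apiKey, baseURL) => {
  if (!apiKey) {
    return null;
  }
  return {
    // 'user_provided' means the key/URL must come from the end user at
    // request time rather than from the server environment.
    userProvide: isUserProvided(apiKey),
    userProvideURL: isUserProvided(baseURL),
  };
};

// Example: an OpenAI-style endpoint whose baseURL may also be user provided.
const openAIConfig = generateConfig(process.env.OPENAI_API_KEY, process.env.OPENAI_BASE_URL);
```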
Danny Avila 2024-02-28 14:27:19 -05:00 committed by GitHub
parent 53ae2d7bfb
commit 2f92b54787
17 changed files with 762 additions and 226 deletions

@@ -20,6 +20,7 @@ const { openAIApiKey, userProvidedOpenAI } = require('./Config/EndpointService')
  * @param {boolean} [params.azure=false] - Whether to fetch models from Azure.
  * @param {boolean} [params.userIdQuery=false] - Whether to send the user ID as a query parameter.
  * @param {boolean} [params.createTokenConfig=true] - Whether to create a token configuration from the API response.
+ * @param {string} [params.tokenKey] - The cache key to save the token configuration. Uses `name` if omitted.
  * @returns {Promise<string[]>} A promise that resolves to an array of model identifiers.
  * @async
  */
@@ -31,6 +32,7 @@ const fetchModels = async ({
   azure = false,
   userIdQuery = false,
   createTokenConfig = true,
+  tokenKey,
 }) => {
   let models = [];

@@ -70,7 +72,7 @@ const fetchModels = async ({
     if (validationResult.success && createTokenConfig) {
       const endpointTokenConfig = processModelData(input);
       const cache = getLogStores(CacheKeys.TOKEN_CONFIG);
-      await cache.set(name, endpointTokenConfig);
+      await cache.set(tokenKey ?? name, endpointTokenConfig);
     }
     models = input.data.map((item) => item.id);
   } catch (error) {
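
As a usage note for the `tokenKey` change above, a hypothetical caller (not part of this diff): when the key/URL is user provided, the fetched `endpointTokenConfig` is cached under a user-scoped key instead of the shared endpoint `name`. Only `tokenKey`, `createTokenConfig`, and `name` are visible in the diff; the other parameters, the require path, and the key format are assumptions.

```js
// Hypothetical call site; the require path and the apiKey/baseURL/user params
// are assumed — only tokenKey, createTokenConfig, and name appear in the diff.
const { fetchModels } = require('./ModelService');

async function loadModelsForCustomEndpoint({ endpoint, apiKey, baseURL, userId, userProvided }) {
  // Per-user cache entry for user-provided credentials, so one user's token
  // rates are never read back for another user; server-provided keys share `name`.
  const tokenKey = userProvided ? `${endpoint}:${userId}` : undefined;

  return fetchModels({
    user: userId,
    apiKey,
    baseURL,
    name: endpoint,
    createTokenConfig: true,
    tokenKey,
  });
}
```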