Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-17 17:00:15 +01:00)
🦙 fix: Ollama Custom Headers (#10314)
* 🦙 fix: Ollama Custom Headers
* chore: Correct import order for resolveHeaders in OllamaClient.js
* fix: Improve error logging for Ollama API model fetch failure
* ci: update Ollama model fetch tests
* ci: Add unit test for passing headers and user object to Ollama fetchModels
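For context, custom endpoints configured in librechat.yaml can define request headers, including user placeholders such as {{LIBRECHAT_USER_ID}}; before this fix, those headers did not reach the Ollama model-list request. The sketch below is illustrative only: resolveTemplateHeaders is a hypothetical stand-in for LibreChat's actual resolveHeaders, showing the kind of substitution involved.

    // Hypothetical stand-in for resolveHeaders: replace {{PLACEHOLDER}} tokens
    // in configured headers with values taken from the request user.
    const resolveTemplateHeaders = (headers = {}, user = {}) => {
      const values = {
        '{{LIBRECHAT_USER_ID}}': user.id ?? '',
        '{{LIBRECHAT_USER_EMAIL}}': user.email ?? '',
      };
      return Object.fromEntries(
        Object.entries(headers).map(([name, value]) => [
          name,
          Object.entries(values).reduce(
            (acc, [token, replacement]) => acc.split(token).join(replacement),
            String(value),
          ),
        ]),
      );
    };

    // The resolved headers must then be attached to the Ollama model-list
    // request, e.g. fetch(`${baseURL}/api/tags`, { headers: resolvedHeaders }),
    // which is the path this commit fixes.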
Parent: 5e35b7d09d
Commit: d904b281f1

5 changed files with 107 additions and 56 deletions
@@ -1,4 +1,3 @@
-const { Providers } = require('@librechat/agents');
 const {
   resolveHeaders,
   isUserProvided,
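The dropped Providers require above is a consequence of the next hunk: with the Ollama special case removed from initializeClient, Providers.OLLAMA is no longer referenced anywhere in this file.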
@@ -143,39 +142,27 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
 
   if (optionsOnly) {
     const modelOptions = endpointOption?.model_parameters ?? {};
-    if (endpoint !== Providers.OLLAMA) {
-      clientOptions = Object.assign(
-        {
-          modelOptions,
-        },
-        clientOptions,
-      );
-      clientOptions.modelOptions.user = req.user.id;
-      const options = getOpenAIConfig(apiKey, clientOptions, endpoint);
-      if (options != null) {
-        options.useLegacyContent = true;
-        options.endpointTokenConfig = endpointTokenConfig;
-      }
-      if (!clientOptions.streamRate) {
-        return options;
-      }
-      options.llmConfig.callbacks = [
-        {
-          handleLLMNewToken: createHandleLLMNewToken(clientOptions.streamRate),
-        },
-      ];
-      return options;
-    }
-
-    if (clientOptions.reverseProxyUrl) {
-      modelOptions.baseUrl = clientOptions.reverseProxyUrl.split('/v1')[0];
-      delete clientOptions.reverseProxyUrl;
-    }
-
-    return {
-      useLegacyContent: true,
-      llmConfig: modelOptions,
-    };
+    clientOptions = Object.assign(
+      {
+        modelOptions,
+      },
+      clientOptions,
+    );
+    clientOptions.modelOptions.user = req.user.id;
+    const options = getOpenAIConfig(apiKey, clientOptions, endpoint);
+    if (options != null) {
+      options.useLegacyContent = true;
+      options.endpointTokenConfig = endpointTokenConfig;
+    }
+    if (!clientOptions.streamRate) {
+      return options;
+    }
+    options.llmConfig.callbacks = [
+      {
+        handleLLMNewToken: createHandleLLMNewToken(clientOptions.streamRate),
+      },
+    ];
+    return options;
   }
 
   const client = new OpenAIClient(apiKey, clientOptions);
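The net effect of this hunk: Ollama no longer short-circuits into a bare { useLegacyContent, llmConfig } return that kept only the baseUrl and dropped any configured headers; it now goes through getOpenAIConfig like every other custom endpoint. The commit message also mentions a unit test asserting that headers and the user object reach Ollama's model fetch. The Jest sketch below is a hedged approximation under assumed names: fetchOllamaModels is a local stand-in, since the real OllamaClient.fetchModels signature is not shown in this diff.

    // Local stand-in for the model-fetch path; not LibreChat's actual code.
    const fetchOllamaModels = async ({ baseURL, headers }) => {
      const res = await fetch(`${baseURL}/api/tags`, { headers });
      if (!res.ok) {
        // The commit also improves error logging on fetch failure,
        // sketched here as a descriptive error message.
        throw new Error(`Ollama model fetch failed: ${res.status} ${res.statusText}`);
      }
      const { models = [] } = await res.json();
      return models.map((model) => model.name);
    };

    test('passes resolved custom headers to the Ollama model fetch', async () => {
      global.fetch = jest.fn().mockResolvedValue({
        ok: true,
        json: async () => ({ models: [{ name: 'llama3' }] }),
      });

      const headers = { Authorization: 'Bearer user-123' };
      const models = await fetchOllamaModels({ baseURL: 'http://localhost:11434', headers });

      expect(global.fetch).toHaveBeenCalledWith(
        'http://localhost:11434/api/tags',
        expect.objectContaining({ headers }),
      );
      expect(models).toEqual(['llama3']);
    });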