🧠 feat: Reasoning UI for Agents (#5904)
* chore: bump https-proxy-agent and @librechat/agents
* refactor: Improve error logging in OllamaClient for API fetch failures
* feat: Add DeepSeek provider support and enhance provider name handling
* refactor: Use Providers.OLLAMA constant for model name check in fetchModels function
* feat: Enhance formatAgentMessages to handle reasoning content type
* feat: OpenRouter Agent Reasoning
* hard work and dedication
* git add .env.example :)
* fix: Handle Google social login with missing last name. Social login with Google was previously displaying 'undefined' when a user's last name was empty or not provided. Changes: conditionally render the last name only if it exists, so 'undefined' is never displayed when it is missing (see the sketch below).
* fix: add missing file endings for developers: yml, yaml, and log

---------

Co-authored-by: Mohamed Al-Duraji <mbalduraji@college.harvard.edu>
Co-authored-by: Deepak Kendole <deepakdpk101@gmail.com>
Co-authored-by: Peter Rothlaender <peter.rothlaender@ginkgo.com>
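A minimal sketch of the Google last-name fix described above, assuming a hypothetical buildFullName helper and a Google-style OAuth profile; the actual strategy file is not part of this diff.

// Hypothetical helper: build the display name only from parts that exist,
// so a missing family_name never renders as the string 'undefined'.
function buildFullName(profile) {
  const { given_name: givenName, family_name: familyName } = profile;
  return [givenName, familyName].filter(Boolean).join(' ').trim();
}

// buildFullName({ given_name: 'Ada' })                          -> 'Ada'
// buildFullName({ given_name: 'Ada', family_name: 'Lovelace' }) -> 'Ada Lovelace'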
Parent: e3b5c59949
Commit: 350e72dede
16 changed files with 757 additions and 618 deletions
@@ -199,6 +199,22 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
         aggregateContent({ event, data });
       },
     },
+    [GraphEvents.ON_REASONING_DELTA]: {
+      /**
+       * Handle ON_REASONING_DELTA event.
+       * @param {string} event - The event name.
+       * @param {StreamEventData} data - The event data.
+       * @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata.
+       */
+      handle: (event, data, metadata) => {
+        if (metadata?.last_agent_index === metadata?.agent_index) {
+          sendEvent(res, { event, data });
+        } else if (!metadata?.hide_sequential_outputs) {
+          sendEvent(res, { event, data });
+        }
+        aggregateContent({ event, data });
+      },
+    },
   };

   return handlers;
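The hunk above registers a handler for reasoning deltas. A self-contained sketch of its gating logic: a delta is streamed to the client when it comes from the last agent in a sequence, or when sequential outputs are not hidden, and it is always aggregated into the stored message. The sendEvent/aggregateContent stand-ins and the event payload below are invented for illustration; only the conditional mirrors the diff.

// Stand-ins for the real res / sendEvent / aggregateContent wiring.
const sentEvents = [];
const aggregated = [];
const res = {};
const sendEvent = (_res, payload) => sentEvents.push(payload);
const aggregateContent = (payload) => aggregated.push(payload);

// Same decision structure as the new ON_REASONING_DELTA handler.
function handleReasoningDelta(event, data, metadata) {
  if (metadata?.last_agent_index === metadata?.agent_index) {
    sendEvent(res, { event, data }); // final agent: always stream to the UI
  } else if (!metadata?.hide_sequential_outputs) {
    sendEvent(res, { event, data }); // intermediate agent, outputs are visible
  }
  aggregateContent({ event, data }); // always kept for the stored message
}

// Intermediate agent with hidden sequential outputs: aggregated, not streamed.
handleReasoningDelta('on_reasoning_delta', { delta: 'thinking...' }, {
  agent_index: 0,
  last_agent_index: 1,
  hide_sequential_outputs: true,
});
console.log(sentEvents.length, aggregated.length); // 0 1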
@@ -20,11 +20,6 @@ const {
   bedrockOutputParser,
   removeNullishValues,
 } = require('librechat-data-provider');
-const {
-  extractBaseURL,
-  // constructAzureURL,
-  // genAzureChatCompletion,
-} = require('~/utils');
 const {
   formatMessage,
   formatAgentMessages,
@@ -477,19 +472,6 @@ class AgentClient extends BaseClient {
         abortController = new AbortController();
       }

-      const baseURL = extractBaseURL(this.completionsUrl);
-      logger.debug('[api/server/controllers/agents/client.js] chatCompletion', {
-        baseURL,
-        payload,
-      });
-
-      // if (this.useOpenRouter) {
-      //   opts.defaultHeaders = {
-      //     'HTTP-Referer': 'https://librechat.ai',
-      //     'X-Title': 'LibreChat',
-      //   };
-      // }
-
       // if (this.options.headers) {
       //   opts.defaultHeaders = { ...opts.defaultHeaders, ...this.options.headers };
       // }
@@ -626,7 +608,7 @@ class AgentClient extends BaseClient {
       let systemContent = [
         systemMessage,
         agent.instructions ?? '',
-        i !== 0 ? agent.additional_instructions ?? '' : '',
+        i !== 0 ? (agent.additional_instructions ?? '') : '',
       ]
         .join('\n')
         .trim();
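Only the parentheses around the nullish coalescing changed in the hunk above (?? binds tighter than the conditional operator, so behavior is identical). For context, a small illustration of how the joined system content differs between the first agent (i === 0) and later agents; the agent values are invented.

// Illustration only: additional_instructions are appended for every agent
// after the first, and empty parts collapse via join('\n') plus trim().
const systemMessage = 'You are a helpful assistant.';
const agent = {
  instructions: 'Answer concisely.',
  additional_instructions: 'Cite sources.',
};

const buildSystemContent = (i) =>
  [
    systemMessage,
    agent.instructions ?? '',
    i !== 0 ? (agent.additional_instructions ?? '') : '',
  ]
    .join('\n')
    .trim();

console.log(buildSystemContent(0)); // two lines: system message + instructions
console.log(buildSystemContent(1)); // three lines, ending with 'Cite sources.'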
@@ -1,5 +1,5 @@
 const { Run, Providers } = require('@librechat/agents');
-const { providerEndpointMap } = require('librechat-data-provider');
+const { providerEndpointMap, KnownEndpoints } = require('librechat-data-provider');

 /**
  * @typedef {import('@librechat/agents').t} t
@@ -7,6 +7,7 @@ const { providerEndpointMap } = require('librechat-data-provider');
  * @typedef {import('@librechat/agents').StreamEventData} StreamEventData
  * @typedef {import('@librechat/agents').EventHandler} EventHandler
  * @typedef {import('@librechat/agents').GraphEvents} GraphEvents
+ * @typedef {import('@librechat/agents').LLMConfig} LLMConfig
  * @typedef {import('@librechat/agents').IState} IState
  */
@@ -32,6 +33,7 @@ async function createRun({
   streamUsage = true,
 }) {
   const provider = providerEndpointMap[agent.provider] ?? agent.provider;
+  /** @type {LLMConfig} */
   const llmConfig = Object.assign(
     {
       provider,
@@ -41,6 +43,11 @@ async function createRun({
     agent.model_parameters,
   );

+  /** @type {'reasoning_content' | 'reasoning'} */
+  let reasoningKey;
+  if (llmConfig.configuration?.baseURL.includes(KnownEndpoints.openrouter)) {
+    reasoningKey = 'reasoning';
+  }
   if (/o1(?!-(?:mini|preview)).*$/.test(llmConfig.model)) {
     llmConfig.streaming = false;
     llmConfig.disableStreaming = true;
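Two decisions are added in the hunk above: the reasoning key is switched to 'reasoning' when the configured baseURL points at OpenRouter (otherwise it is left unset), and streaming is disabled for o1 models other than o1-mini and o1-preview. A standalone sketch, with the 'openrouter' string standing in for KnownEndpoints.openrouter and the undefined fallback assumed to mean the runtime's default key.

// Sketch of the reasoningKey choice from the hunk above.
function pickReasoningKey(baseURL) {
  /** @type {'reasoning_content' | 'reasoning' | undefined} */
  let reasoningKey;
  if (baseURL?.includes('openrouter')) {
    reasoningKey = 'reasoning';
  }
  return reasoningKey; // undefined: assumed to fall back to the runtime default
}

console.log(pickReasoningKey('https://openrouter.ai/api/v1')); // 'reasoning'
console.log(pickReasoningKey('https://api.example.com/v1'));   // undefined

// The o1 check from the hunk: true for o1 variants except o1-mini / o1-preview.
const isNonStreamingO1 = (model) => /o1(?!-(?:mini|preview)).*$/.test(model);
console.log(isNonStreamingO1('o1'));         // true
console.log(isNonStreamingO1('o1-mini'));    // false
console.log(isNonStreamingO1('o1-preview')); // false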
@@ -50,6 +57,7 @@ async function createRun({
   const graphConfig = {
     signal,
     llmConfig,
+    reasoningKey,
     tools: agent.tools,
     instructions: agent.instructions,
     additional_instructions: agent.additional_instructions,
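The graphConfig above forwards reasoningKey to the agents runtime; how @librechat/agents consumes it is not part of this diff. The sketch below only illustrates the general idea of reading a provider's reasoning tokens from a configurable field name, with invented delta shapes (OpenRouter-style payloads typically carry 'reasoning', DeepSeek-style ones 'reasoning_content').

// Invented delta shapes for illustration.
const openRouterDelta = { content: '', reasoning: 'Comparing the two options...' };
const deepSeekDelta = { content: '', reasoning_content: 'Restating the problem...' };

// Generic extraction keyed by the configured reasoningKey.
const getReasoningText = (delta, reasoningKey) => delta?.[reasoningKey] ?? '';

console.log(getReasoningText(openRouterDelta, 'reasoning'));       // OpenRouter-style
console.log(getReasoningText(deepSeekDelta, 'reasoning_content')); // DeepSeek-style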