Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-12-18 17:30:16 +01:00
refactor(initializeFunctionsAgent.js): remove unused code and comments (#544)
feat(initializeFunctionsAgent.js): add support for openai-functions agent type
feat(askGPTPlugins.js): change default agent to functions and skip completion
feat(cleanupPreset.js): change default agent to functions and skip completion
feat(getDefaultConversation.js): change default agent to functions and skip completion
feat(handleSubmit.js): change default agent to functions and skip completion
This commit is contained in:
parent 731304f96a
commit 7efb90366f
5 changed files with 9 additions and 21 deletions
initializeFunctionsAgent.js

@@ -1,5 +1,3 @@
-// const FunctionsAgent = require('./FunctionsAgent');
-// const { AgentExecutor, initializeAgentExecutorWithOptions } = require('langchain/agents');
 const { initializeAgentExecutorWithOptions } = require('langchain/agents');
 const { BufferMemory, ChatMessageHistory } = require('langchain/memory');
 
@@ -10,13 +8,6 @@ const initializeFunctionsAgent = async ({
   // currentDateString,
   ...rest
 }) => {
-  // const agent = FunctionsAgent.fromLLMAndTools(
-  //   model,
-  //   tools,
-  //   {
-  //     currentDateString,
-  //   });
-
 
   const memory = new BufferMemory({
     chatHistory: new ChatMessageHistory(pastMessages),
@@ -28,15 +19,12 @@ const initializeFunctionsAgent = async ({
     returnMessages: true,
   });
 
-  // return AgentExecutor.fromAgentAndTools({ agent, tools, memory, ...rest });
-
   return await initializeAgentExecutorWithOptions(
     tools,
     model,
     {
       agentType: "openai-functions",
       memory,
-      maxIterations: 4,
       ...rest,
     }
   );
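For reference, a minimal standalone sketch of the langchain.js pattern this file now relies on: initializeAgentExecutorWithOptions called with tools, a chat model, and agentType 'openai-functions', plus a BufferMemory built from prior messages. The ChatOpenAI model, Calculator tool, memoryKey, and empty history below are illustrative assumptions, not code from this commit; running it needs OPENAI_API_KEY set.

const { initializeAgentExecutorWithOptions } = require('langchain/agents');
const { BufferMemory, ChatMessageHistory } = require('langchain/memory');
const { ChatOpenAI } = require('langchain/chat_models/openai');
const { Calculator } = require('langchain/tools/calculator');

const run = async () => {
  // Illustrative model and tools; the real initializer receives these as arguments.
  const model = new ChatOpenAI({ modelName: 'gpt-3.5-turbo', temperature: 0 });
  const tools = [new Calculator()];

  // Memory wired the way the hunks above show: past messages become chat history.
  const memory = new BufferMemory({
    chatHistory: new ChatMessageHistory([]), // pastMessages would be supplied here
    returnMessages: true,
    memoryKey: 'chat_history', // assumption: memory key expected by the agent prompt
  });

  // The call from the last hunk: tools, model, then options with the new agent type.
  const executor = await initializeAgentExecutorWithOptions(tools, model, {
    agentType: 'openai-functions',
    memory,
  });

  const result = await executor.call({ input: 'What is 7 * 6?' });
  console.log(result.output);
};

run().catch(console.error);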
askGPTPlugins.js

@@ -39,8 +39,8 @@ router.post('/', requireJwtAuth, async (req, res) => {
   if (endpoint !== 'gptPlugins') return handleError(res, { text: 'Illegal request' });
 
   const agentOptions = req.body?.agentOptions ?? {
-    agent: 'classic',
-    skipCompletion: false,
+    agent: 'functions',
+    skipCompletion: true,
     model: 'gpt-3.5-turbo',
     temperature: 0,
     // top_p: 1,
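The three remaining files apply the same two-line change to their local defaults. These are fallback values only: because of the nullish coalescing operator, agentOptions already saved on a request, preset, or conversation are kept as-is, and only a missing agentOptions object picks up agent: 'functions' and skipCompletion: true. A small standalone sketch of that behavior (not repo code):

// No stored options: the new defaults apply.
const fromNewRequest = undefined;
const agentOptions = fromNewRequest ?? {
  agent: 'functions',
  skipCompletion: true,
  model: 'gpt-3.5-turbo',
  temperature: 0,
};
console.log(agentOptions.agent, agentOptions.skipCompletion); // 'functions' true

// Stored options win: an older preset keeps 'classic' until it is edited.
const fromOldPreset = { agent: 'classic', skipCompletion: false };
console.log((fromOldPreset ?? agentOptions).agent); // 'classic'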
cleanupPreset.js

@@ -51,8 +51,8 @@ const cleanupPreset = ({ preset: _preset, endpointsConfig = {} }) => {
     };
   } else if (endpoint === 'gptPlugins') {
     const agentOptions = _preset?.agentOptions ?? {
-      agent: 'classic',
-      skipCompletion: false,
+      agent: 'functions',
+      skipCompletion: true,
       model: 'gpt-3.5-turbo',
       temperature: 0,
       // top_p: 1,
getDefaultConversation.js

@@ -67,8 +67,8 @@ const buildDefaultConversation = ({
     };
   } else if (endpoint === 'gptPlugins') {
     const agentOptions = lastConversationSetup?.agentOptions ?? {
-      agent: 'classic',
-      skipCompletion: false,
+      agent: 'functions',
+      skipCompletion: true,
       model: 'gpt-3.5-turbo',
       temperature: 0,
       // top_p: 1,
handleSubmit.js

@@ -88,8 +88,8 @@ const useMessageHandler = () => {
     responseSender = 'ChatGPT';
   } else if (endpoint === 'gptPlugins') {
     const agentOptions = currentConversation?.agentOptions ?? {
-      agent: 'classic',
-      skipCompletion: false,
+      agent: 'functions',
+      skipCompletion: true,
       model: 'gpt-3.5-turbo',
       temperature: 0,
       // top_p: 1,