mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-16 16:30:15 +01:00
refactor(initializeFunctionsAgent.js): remove unused code and comments (#544)
feat(initializeFunctionsAgent.js): add support for openai-functions agent type
feat(askGPTPlugins.js): change default agent to functions and skip completion
feat(cleanupPreset.js): change default agent to functions and skip completion
feat(getDefaultConversation.js): change default agent to functions and skip completion
feat(handleSubmit.js): change default agent to functions and skip completion
This commit is contained in:
parent
731304f96a
commit
7efb90366f
5 changed files with 9 additions and 21 deletions
|
|
@@ -1,5 +1,3 @@
|
|||
// const FunctionsAgent = require('./FunctionsAgent');
|
||||
// const { AgentExecutor, initializeAgentExecutorWithOptions } = require('langchain/agents');
|
||||
const { initializeAgentExecutorWithOptions } = require('langchain/agents');
|
||||
const { BufferMemory, ChatMessageHistory } = require('langchain/memory');
|
||||
|
||||
|
|
@@ -10,14 +8,7 @@ const initializeFunctionsAgent = async ({
|
|||
// currentDateString,
|
||||
...rest
|
||||
}) => {
|
||||
// const agent = FunctionsAgent.fromLLMAndTools(
|
||||
// model,
|
||||
// tools,
|
||||
// {
|
||||
// currentDateString,
|
||||
// });
|
||||
|
||||
|
||||
|
||||
const memory = new BufferMemory({
|
||||
chatHistory: new ChatMessageHistory(pastMessages),
|
||||
memoryKey: 'chat_history',
|
||||
|
|
@@ -28,15 +19,12 @@ const initializeFunctionsAgent = async ({
|
|||
returnMessages: true,
|
||||
});
|
||||
|
||||
// return AgentExecutor.fromAgentAndTools({ agent, tools, memory, ...rest });
|
||||
|
||||
return await initializeAgentExecutorWithOptions(
|
||||
tools,
|
||||
model,
|
||||
{
|
||||
agentType: "openai-functions",
|
||||
memory,
|
||||
maxIterations: 4,
|
||||
...rest,
|
||||
}
|
||||
);
|
||||
|
|
|
|||
|
|
@@ -39,8 +39,8 @@ router.post('/', requireJwtAuth, async (req, res) => {
|
|||
if (endpoint !== 'gptPlugins') return handleError(res, { text: 'Illegal request' });
|
||||
|
||||
const agentOptions = req.body?.agentOptions ?? {
|
||||
agent: 'classic',
|
||||
skipCompletion: false,
|
||||
agent: 'functions',
|
||||
skipCompletion: true,
|
||||
model: 'gpt-3.5-turbo',
|
||||
temperature: 0,
|
||||
// top_p: 1,
|
||||
|
|
|
|||
|
|
@@ -51,8 +51,8 @@ const cleanupPreset = ({ preset: _preset, endpointsConfig = {} }) => {
|
|||
};
|
||||
} else if (endpoint === 'gptPlugins') {
|
||||
const agentOptions = _preset?.agentOptions ?? {
|
||||
agent: 'classic',
|
||||
skipCompletion: false,
|
||||
agent: 'functions',
|
||||
skipCompletion: true,
|
||||
model: 'gpt-3.5-turbo',
|
||||
temperature: 0,
|
||||
// top_p: 1,
|
||||
|
|
|
|||
|
|
@@ -67,8 +67,8 @@ const buildDefaultConversation = ({
|
|||
};
|
||||
} else if (endpoint === 'gptPlugins') {
|
||||
const agentOptions = lastConversationSetup?.agentOptions ?? {
|
||||
agent: 'classic',
|
||||
skipCompletion: false,
|
||||
agent: 'functions',
|
||||
skipCompletion: true,
|
||||
model: 'gpt-3.5-turbo',
|
||||
temperature: 0,
|
||||
// top_p: 1,
|
||||
|
|
|
|||
|
|
@@ -88,8 +88,8 @@ const useMessageHandler = () => {
|
|||
responseSender = 'ChatGPT';
|
||||
} else if (endpoint === 'gptPlugins') {
|
||||
const agentOptions = currentConversation?.agentOptions ?? {
|
||||
agent: 'classic',
|
||||
skipCompletion: false,
|
||||
agent: 'functions',
|
||||
skipCompletion: true,
|
||||
model: 'gpt-3.5-turbo',
|
||||
temperature: 0,
|
||||
// top_p: 1,
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue