feat(Functions Agent): use official langchain function executor/agent for better output handling (#538)

* style(FunctionsAgent.js): remove unnecessary comments and update PREFIX variable
refactor(initializeFunctionsAgent.js): update to use initializeAgentExecutorWithOptions
deps(package.json): update langchain to v0.0.95
refactor(askGPTPlugins.js): pass endpointOption to onStart function

* fix(ChatAgent.js): handle undefined delta content in progressMessage.choices array
Authored by Danny Avila on 2023-06-19 14:15:56 -04:00, committed by GitHub
parent 49e2cdf76c
commit f84da37c9c
6 changed files with 47 additions and 28 deletions
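For context, the heart of this change is replacing the hand-rolled FunctionsAgent + AgentExecutor pairing with langchain's built-in OpenAI-functions agent, which formats tool schemas as function definitions and parses the model's function_call output. A minimal standalone sketch of the new initialization path (not the project's code; the Calculator tool and model name are placeholder choices, assuming langchain ~0.0.95):

const { initializeAgentExecutorWithOptions } = require('langchain/agents');
const { ChatOpenAI } = require('langchain/chat_models/openai');
const { Calculator } = require('langchain/tools/calculator');

(async () => {
  // 'openai-functions' selects the official function-calling agent, so no
  // custom prompt or output parser is needed for tool invocation.
  const executor = await initializeAgentExecutorWithOptions(
    [new Calculator()],
    new ChatOpenAI({ modelName: 'gpt-3.5-turbo-0613', temperature: 0 }),
    { agentType: 'openai-functions', maxIterations: 4 },
  );

  const result = await executor.call({ input: 'What is 7 * 13?' });
  console.log(result.output);
})();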

ChatAgent.js

@@ -546,7 +546,7 @@ Only respond with your conversational reply to the following User Message:
return;
}
const token = this.isChatGptModel
- ? progressMessage.choices[0].delta.content
+ ? progressMessage.choices?.[0]?.delta.content
: progressMessage.choices[0].text;
// first event's delta content is always undefined
if (!token) {
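A small illustration of why the optional chaining above matters (not from the repo; the chunk shapes below just mimic OpenAI's streaming payloads): the first streamed delta typically carries only a role, and the guard also covers chunks whose choices array is empty, so reading choices[0].delta.content unguarded could throw mid-stream.

// Hypothetical streamed chunks shaped like OpenAI chat completion events.
const chunks = [
  { choices: [{ delta: { role: 'assistant' } }] }, // first delta: no content
  { choices: [{ delta: { content: 'Hello' } }] },
  { choices: [] },                                 // defensive case: nothing to read
];

for (const progressMessage of chunks) {
  // Optional chaining yields undefined instead of throwing when a link is missing.
  const token = progressMessage.choices?.[0]?.delta?.content;
  if (!token) {
    continue; // skip empty deltas, as the ChatAgent code does
  }
  process.stdout.write(token);
}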

FunctionsAgent.js

@@ -7,8 +7,7 @@ const {
SystemMessagePromptTemplate,
HumanMessagePromptTemplate
} = require('langchain/prompts');
- const PREFIX = `You are a helpful AI assistant. Objective: Resolve the user's query with provided functions.
- The user is demanding a function response to the query.`;
+ const PREFIX = `You are a helpful AI assistant.`;
function parseOutput(message) {
if (message.additional_kwargs.function_call) {
@@ -52,9 +51,9 @@ class FunctionsAgent extends Agent {
return ChatPromptTemplate.fromPromptMessages([
SystemMessagePromptTemplate.fromTemplate(`Date: ${currentDateString}\n${prefix}`),
- HumanMessagePromptTemplate.fromTemplate(`{chat_history}
- Query: {input}`),
- new MessagesPlaceholder('agent_scratchpad')
+ new MessagesPlaceholder('chat_history'),
+ HumanMessagePromptTemplate.fromTemplate(`Query: {input}`),
+ new MessagesPlaceholder('agent_scratchpad'),
]);
}
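Although the custom agent is commented out of the initialization path below, its template is updated so history flows in as structured messages: MessagesPlaceholder('chat_history') expects an array of chat messages, which is what BufferMemory produces once returnMessages: true is set in initializeFunctionsAgent.js. A rough sketch of the rebuilt prompt (an illustration, assuming langchain ~0.0.95 prompt APIs):

const {
  ChatPromptTemplate,
  SystemMessagePromptTemplate,
  HumanMessagePromptTemplate,
  MessagesPlaceholder,
} = require('langchain/prompts');

const currentDateString = new Date().toLocaleDateString();
const PREFIX = 'You are a helpful AI assistant.';

// History and the agent's intermediate steps stay as message objects; only the
// system preamble and the current query are rendered from text templates.
const prompt = ChatPromptTemplate.fromPromptMessages([
  SystemMessagePromptTemplate.fromTemplate(`Date: ${currentDateString}\n${PREFIX}`),
  new MessagesPlaceholder('chat_history'),
  HumanMessagePromptTemplate.fromTemplate('Query: {input}'),
  new MessagesPlaceholder('agent_scratchpad'),
]);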

initializeFunctionsAgent.js

@@ -1,32 +1,47 @@
- const FunctionsAgent = require('./FunctionsAgent');
- const { AgentExecutor } = require('langchain/agents');
+ // const FunctionsAgent = require('./FunctionsAgent');
+ // const { AgentExecutor, initializeAgentExecutorWithOptions } = require('langchain/agents');
+ const { initializeAgentExecutorWithOptions } = require('langchain/agents');
const { BufferMemory, ChatMessageHistory } = require('langchain/memory');
const initializeFunctionsAgent = async ({
tools,
model,
pastMessages,
- currentDateString,
+ // currentDateString,
...rest
}) => {
- const agent = FunctionsAgent.fromLLMAndTools(
- model,
- tools,
- {
- currentDateString,
- });
+ // const agent = FunctionsAgent.fromLLMAndTools(
+ // model,
+ // tools,
+ // {
+ // currentDateString,
+ // });
const memory = new BufferMemory({
chatHistory: new ChatMessageHistory(pastMessages),
// returnMessages: true, // commenting this out retains memory
memoryKey: 'chat_history',
humanPrefix: 'User',
aiPrefix: 'Assistant',
inputKey: 'input',
- outputKey: 'output'
+ outputKey: 'output',
+ returnMessages: true,
});
- return AgentExecutor.fromAgentAndTools({ agent, tools, memory, ...rest });
+ // return AgentExecutor.fromAgentAndTools({ agent, tools, memory, ...rest });
+ return await initializeAgentExecutorWithOptions(
+ tools,
+ model,
+ {
+ agentType: "openai-functions",
+ memory,
+ maxIterations: 4,
+ ...rest,
+ }
+ );
};
module.exports = initializeFunctionsAgent;
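A hypothetical caller-side sketch (not part of this commit) of how the exported helper might be used; the model, tool, and message values are placeholders, and the message classes assume langchain's ~0.0.95 schema exports:

const { ChatOpenAI } = require('langchain/chat_models/openai');
const { Calculator } = require('langchain/tools/calculator');
const { HumanChatMessage, AIChatMessage } = require('langchain/schema');
const initializeFunctionsAgent = require('./initializeFunctionsAgent');

(async () => {
  const executor = await initializeFunctionsAgent({
    model: new ChatOpenAI({ modelName: 'gpt-3.5-turbo-0613', temperature: 0 }),
    tools: [new Calculator()],
    pastMessages: [
      new HumanChatMessage('My name is Dan.'),
      new AIChatMessage('Nice to meet you, Dan!'),
    ],
  });

  // BufferMemory replays the prior turns as chat_history on every call.
  const result = await executor.call({ input: 'What is my name?' });
  console.log(result.output);
})();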

package.json

@@ -46,7 +46,7 @@
"jsonwebtoken": "^9.0.0",
"keyv": "^4.5.2",
"keyv-file": "^0.2.0",
- "langchain": "^0.0.94",
+ "langchain": "^0.0.95",
"lodash": "^4.17.21",
"meilisearch": "^0.33.0",
"mongoose": "^7.1.1",

askGPTPlugins.js

@@ -218,6 +218,7 @@ const ask = async ({ text, endpointOption, parentMessageId = null, conversationI
onAgentAction,
onChainEnd,
onStart,
+ ...endpointOption,
onProgress: progressCallback.call(null, {
res,
text,

package-lock.json (generated, 22 changed lines)

@@ -63,7 +63,7 @@
"jsonwebtoken": "^9.0.0",
"keyv": "^4.5.2",
"keyv-file": "^0.2.0",
- "langchain": "^0.0.94",
+ "langchain": "^0.0.95",
"lodash": "^4.17.21",
"meilisearch": "^0.33.0",
"mongoose": "^7.1.1",
@@ -130,9 +130,9 @@
}
},
"api/node_modules/langchain": {
- "version": "0.0.94",
- "resolved": "https://registry.npmjs.org/langchain/-/langchain-0.0.94.tgz",
- "integrity": "sha512-RafU2Nk005jnNgOPSi5LhXtwOhdf8nwLSWU4hRyMXwB1l3lNCwTKlYWfKwMQ9VaSEe+4fEaO8lM9yVp+y3aa9w==",
+ "version": "0.0.95",
+ "resolved": "https://registry.npmjs.org/langchain/-/langchain-0.0.95.tgz",
+ "integrity": "sha512-UjOr1XArTTSZB4uCayKzQs69pCahHCq/CVTc66fAymCD5uKerh8AvcemPxgRpTi8tgcdhY76j//c0e06l5aFdA==",
"dependencies": {
"@anthropic-ai/sdk": "^0.4.3",
"ansi-styles": "^5.0.0",
@@ -204,6 +204,7 @@
"replicate": "^0.9.0",
"srt-parser-2": "^1.2.2",
"typeorm": "^0.3.12",
+ "typesense": "^1.5.3",
"weaviate-ts-client": "^1.0.0"
},
"peerDependenciesMeta": {
@@ -345,6 +346,9 @@
"typeorm": {
"optional": true
},
+ "typesense": {
+ "optional": true
+ },
"weaviate-ts-client": {
"optional": true
}
@@ -29549,7 +29553,7 @@
"jsonwebtoken": "^9.0.0",
"keyv": "^4.5.2",
"keyv-file": "^0.2.0",
- "langchain": "^0.0.94",
+ "langchain": "0.0.95",
"lodash": "^4.17.21",
"meilisearch": "^0.33.0",
"mongoose": "^7.1.1",
@@ -29592,9 +29596,9 @@
}
},
"langchain": {
- "version": "0.0.94",
- "resolved": "https://registry.npmjs.org/langchain/-/langchain-0.0.94.tgz",
- "integrity": "sha512-RafU2Nk005jnNgOPSi5LhXtwOhdf8nwLSWU4hRyMXwB1l3lNCwTKlYWfKwMQ9VaSEe+4fEaO8lM9yVp+y3aa9w==",
+ "version": "0.0.95",
+ "resolved": "https://registry.npmjs.org/langchain/-/langchain-0.0.95.tgz",
+ "integrity": "sha512-UjOr1XArTTSZB4uCayKzQs69pCahHCq/CVTc66fAymCD5uKerh8AvcemPxgRpTi8tgcdhY76j//c0e06l5aFdA==",
"requires": {
"@anthropic-ai/sdk": "^0.4.3",
"ansi-styles": "^5.0.0",
@@ -29644,7 +29648,7 @@
"@radix-ui/react-icons": "^1.3.0",
"@radix-ui/react-label": "^2.0.0",
"@radix-ui/react-slider": "^1.1.1",
- "@radix-ui/react-switch": "*",
+ "@radix-ui/react-switch": "^1.0.3",
"@radix-ui/react-tabs": "^1.0.3",
"@tailwindcss/forms": "^0.5.3",
"@tanstack/react-query": "^4.28.0",