mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-17 00:40:14 +01:00
refactor(FunctionsAgent.js): change var to const in plan function
This commit is contained in:
parent
198f60c536
commit
6e183b91e1
1 changed file with 9 additions and 9 deletions
|
|
@ -88,28 +88,28 @@ Query: {input}
|
||||||
|
|
||||||
async plan(steps, inputs, callbackManager) {
|
async plan(steps, inputs, callbackManager) {
|
||||||
// Add scratchpad and stop to inputs
|
// Add scratchpad and stop to inputs
|
||||||
var thoughts = await this.constructScratchPad(steps);
|
const thoughts = await this.constructScratchPad(steps);
|
||||||
var newInputs = Object.assign({}, inputs, { agent_scratchpad: thoughts });
|
const newInputs = Object.assign({}, inputs, { agent_scratchpad: thoughts });
|
||||||
if (this._stop().length !== 0) {
|
if (this._stop().length !== 0) {
|
||||||
newInputs.stop = this._stop();
|
newInputs.stop = this._stop();
|
||||||
}
|
}
|
||||||
|
|
||||||
// Split inputs between prompt and llm
|
// Split inputs between prompt and llm
|
||||||
var llm = this.llmChain.llm;
|
const llm = this.llmChain.llm;
|
||||||
var valuesForPrompt = Object.assign({}, newInputs);
|
const valuesForPrompt = Object.assign({}, newInputs);
|
||||||
var valuesForLLM = {
|
const valuesForLLM = {
|
||||||
tools: this.tools
|
tools: this.tools
|
||||||
};
|
};
|
||||||
for (var i = 0; i < this.llmChain.llm.callKeys.length; i++) {
|
for (let i = 0; i < this.llmChain.llm.callKeys.length; i++) {
|
||||||
var key = this.llmChain.llm.callKeys[i];
|
const key = this.llmChain.llm.callKeys[i];
|
||||||
if (key in inputs) {
|
if (key in inputs) {
|
||||||
valuesForLLM[key] = inputs[key];
|
valuesForLLM[key] = inputs[key];
|
||||||
delete valuesForPrompt[key];
|
delete valuesForPrompt[key];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var promptValue = await this.llmChain.prompt.formatPromptValue(valuesForPrompt);
|
const promptValue = await this.llmChain.prompt.formatPromptValue(valuesForPrompt);
|
||||||
var message = await llm.predictMessages(
|
const message = await llm.predictMessages(
|
||||||
promptValue.toChatMessages(),
|
promptValue.toChatMessages(),
|
||||||
valuesForLLM,
|
valuesForLLM,
|
||||||
callbackManager
|
callbackManager
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue