refactor(PluginsClient.js): simplify getFunctionModelName logic using if-else statements

refactor(PluginsClient.js): improve readability by extracting observedImagePath variable
fix(PluginsClient.js): check if responseMessage already includes observedImagePath before appending observation
Danny Avila 2023-07-23 14:31:50 -04:00 committed by Danny Avila
parent 197307d514
commit 18c4883ae0


@@ -132,14 +132,13 @@ Only respond with your conversational reply to the following User Message:
   }
   getFunctionModelName(input) {
-    const prefixMap = {
-      'gpt-4': 'gpt-4-0613',
-      'gpt-4-32k': 'gpt-4-32k-0613',
-      'gpt-3.5-turbo': 'gpt-3.5-turbo-0613',
-    };
-    const prefix = Object.keys(prefixMap).find((key) => input.startsWith(key));
-    return prefix ? prefixMap[prefix] : 'gpt-3.5-turbo-0613';
+    if (input.startsWith('gpt-3.5-turbo')) {
+      return 'gpt-3.5-turbo';
+    } else if (input.startsWith('gpt-4')) {
+      return 'gpt-4';
+    } else {
+      return 'gpt-3.5-turbo';
+    }
   }
   getBuildMessagesOptions(opts) {
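
For context, a minimal standalone sketch of how the refactored getFunctionModelName resolves model names; the example calls and their inputs are illustrative and are not part of this commit:

// Sketch of the refactored prefix check; `input` is a model name string.
function getFunctionModelName(input) {
  if (input.startsWith('gpt-3.5-turbo')) {
    return 'gpt-3.5-turbo';
  } else if (input.startsWith('gpt-4')) {
    return 'gpt-4';
  } else {
    // Any unrecognized model name falls back to gpt-3.5-turbo.
    return 'gpt-3.5-turbo';
  }
}

console.log(getFunctionModelName('gpt-4-32k'));         // 'gpt-4'
console.log(getFunctionModelName('gpt-3.5-turbo-16k')); // 'gpt-3.5-turbo'
console.log(getFunctionModelName('text-davinci-003'));  // 'gpt-3.5-turbo' (fallback)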
@@ -184,7 +183,9 @@ Only respond with your conversational reply to the following User Message:
     const model = this.createLLM(modelOptions, configOptions);
     if (this.options.debug) {
-      console.debug(`<-----Agent Model: ${model.modelName} | Temp: ${model.temperature}----->`);
+      console.debug(
+        `<-----Agent Model: ${model.modelName} | Temp: ${model.temperature} | Functions: ${this.functionsAgent}----->`,
+      );
     }
     this.availableTools = await loadTools({
@@ -328,7 +329,12 @@ Only respond with your conversational reply to the following User Message:
         return;
       }
-      if (!responseMessage.text.includes(observation)) {
+      // Extract the image file path from the observation
+      const observedImagePath = observation.match(/\(\/images\/.*\.\w*\)/g)[0];
+      // Check if the responseMessage already includes the image file path
+      if (!responseMessage.text.includes(observedImagePath)) {
+        // If the image file path is not found, append the whole observation
         responseMessage.text += '\n' + observation;
         if (this.options.debug) {
           console.debug('added image from intermediateSteps');
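
As a side note, a minimal sketch of the image-path extraction this hunk relies on; the observation string below is a made-up example rather than real plugin output, and the sketch adds a null guard that the committed code does not include:

// Hypothetical observation containing a Markdown image link.
const observation = 'Here is your chart: ![chart](/images/chart-abc123.png)';

// Same regex as the commit; match() returns null when no image path is present.
const matches = observation.match(/\(\/images\/.*\.\w*\)/g);
const observedImagePath = matches ? matches[0] : null;

console.log(observedImagePath); // '(/images/chart-abc123.png)'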