feat: user_provided token for plugins; temporarily remove user_provided Azure creds for Plugins until a solution is made

This commit is contained in:
Danny Avila 2023-07-06 13:18:06 -04:00 committed by Danny Avila
parent 5b30ab5d43
commit 5ad0ef331f
7 changed files with 29 additions and 18 deletions

View file

@@ -244,7 +244,8 @@ class OpenAIClient extends BaseClient {
// TODO: need to handle interleaving instructions better
if (this.contextStrategy) {
({ payload, tokenCountMap, promptTokens, messages } = await this.handleContextStrategy({instructions, orderedMessages, formattedMessages}));
({ payload, tokenCountMap, promptTokens, messages } =
await this.handleContextStrategy({instructions, orderedMessages, formattedMessages}));
}
const result = {
@@ -271,7 +272,8 @@ class OpenAIClient extends BaseClient {
if (progressMessage === '[DONE]') {
return;
}
const token = this.isChatCompletion ? progressMessage.choices?.[0]?.delta?.content : progressMessage.choices?.[0]?.text;
const token =
this.isChatCompletion ? progressMessage.choices?.[0]?.delta?.content : progressMessage.choices?.[0]?.text;
// first event's delta content is always undefined
if (!token) {
return;

View file

@@ -86,7 +86,7 @@ class PluginsClient extends OpenAIClient {
const preliminaryAnswer =
result.output?.length > 0 ? `Preliminary Answer: "${result.output.trim()}"` : '';
const prefix = preliminaryAnswer
? `review and improve the answer you generated using plugins in response to the User Message below. The user hasn't seen your answer or thoughts yet.`
? 'review and improve the answer you generated using plugins in response to the User Message below. The user hasn\'t seen your answer or thoughts yet.'
: 'respond to the User Message below based on your preliminary thoughts & actions.';
return `As a helpful AI Assistant, ${prefix}${errorMessage}\n${internalActions}
@@ -153,16 +153,21 @@ Only respond with your conversational reply to the following User Message:
createLLM(modelOptions, configOptions) {
let credentials = { openAIApiKey: this.openAIApiKey };
let configuration = {
apiKey: this.openAIApiKey,
};
if (this.azure) {
credentials = { ...this.azure };
credentials = {};
configuration = {};
}
if (this.options.debug) {
console.debug('createLLM: configOptions');
console.debug(configOptions);
console.debug(configOptions, credentials);
}
return new ChatOpenAI({ credentials, ...modelOptions }, configOptions);
return new ChatOpenAI({ credentials, configuration, ...modelOptions }, configOptions);
}
async initialize({ user, message, onAgentAction, onChainEnd, signal }) {
@@ -525,7 +530,7 @@ Only respond with your conversational reply to the following User Message:
currentTokenCount += 2;
if (this.isGpt3 && messagePayload.content.length > 0) {
const context = `Chat History:\n`;
const context = 'Chat History:\n';
messagePayload.content = `${context}${prompt}`;
currentTokenCount += this.getTokenCount(context);
}

View file

@@ -23,7 +23,7 @@ const initializeFunctionsAgent = async ({
tools,
model,
{
agentType: "openai-functions",
agentType: 'openai-functions',
memory,
...rest,
}

View file

@@ -167,11 +167,16 @@ const ask = async ({ text, endpoint, endpointOption, parentMessageId = null, con
...endpointOption
};
if (process.env.PLUGINS_USE_AZURE === 'true') {
let oaiApiKey = req.body?.token ?? process.env.OPENAI_API_KEY;
if (process.env.PLUGINS_USE_AZURE) {
clientOptions.azure = getAzureCredentials();
oaiApiKey = clientOptions.azure.azureOpenAIApiKey;
}
const oaiApiKey = req.body?.token ?? process.env.OPENAI_API_KEY;
if (oaiApiKey && oaiApiKey.includes('azure') && !clientOptions.azure) {
clientOptions.azure = JSON.parse(req.body?.token) ?? getAzureCredentials();
oaiApiKey = clientOptions.azure.azureOpenAIApiKey;
}
const chatAgent = new PluginsClient(oaiApiKey, clientOptions);
const onAgentAction = (action) => {

View file

@@ -12,7 +12,7 @@ const genAzureChatCompletion = ({
const getAzureCredentials = () => {
return {
azureOpenAIApiKey: process.env.AZURE_API_KEY,
azureOpenAIApiKey: process.env.AZURE_API_KEY ?? process.env.AZURE_OPENAI_API_KEY,
azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_API_INSTANCE_NAME,
azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME,
azureOpenAIApiVersion: process.env.AZURE_OPENAI_API_VERSION

View file

@@ -1,7 +1,8 @@
/* eslint-disable react-hooks/exhaustive-deps */
import React, { useEffect, useState } from 'react';
import * as Checkbox from '@radix-ui/react-checkbox';
import { CheckIcon } from '@radix-ui/react-icons';
// TODO: Temporarily remove checkbox until Plugins solution for Azure is figured out
// import * as Checkbox from '@radix-ui/react-checkbox';
// import { CheckIcon } from '@radix-ui/react-icons';
import InputWithLabel from './InputWithLabel';
import store from '~/store';
@@ -98,7 +99,7 @@ const OpenAIConfig = ({ token, setToken, endpoint } : OpenAIConfigProps) => {
/>
</>
)}
{ endpoint === 'gptPlugins' && (
{/* { endpoint === 'gptPlugins' && (
<div className="flex items-center">
<Checkbox.Root
className="flex h-[20px] w-[20px] appearance-none items-center justify-center rounded-[4px] bg-gray-100 text-white outline-none hover:bg-gray-200 dark:bg-gray-700 dark:hover:bg-gray-900"
@@ -118,7 +119,7 @@ const OpenAIConfig = ({ token, setToken, endpoint } : OpenAIConfigProps) => {
Use Azure OpenAI.
</label>
</div>
)}
)} */}
</>
);
};

View file

@@ -92,9 +92,6 @@ const useMessageHandler = () => {
skipCompletion: true,
model: 'gpt-3.5-turbo',
temperature: 0,
// top_p: 1,
// presence_penalty: 0,
// frequency_penalty: 0
};
endpointOption = {
endpoint,
@@ -109,6 +106,7 @@ const useMessageHandler = () => {
top_p: currentConversation?.top_p ?? 1,
presence_penalty: currentConversation?.presence_penalty ?? 0,
frequency_penalty: currentConversation?.frequency_penalty ?? 0,
token: endpointsConfig[endpoint]?.userProvide ? getToken() : null,
agentOptions
};
responseSender = 'ChatGPT';