Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-28 06:08:50 +01:00)
🗃️ feat: General File Support for OpenAI, Azure, Custom, Anthropic and Google (RAG) (#2143)
* refactor: re-purpose `resendImages` as `resendFiles`
* feat: upload general files
* feat: embed file during upload
* feat: delete file embeddings on file deletion
* chore(fileConfig): add epub+zip type
* feat(encodeAndFormat): handle non-image files
* feat(createContextHandlers): build context prompt from file attachments and successful RAG
* fix: prevent non-temp and embedded files from being deleted on new conversation
* fix: remove temp_file_id on usage; prevent non-temp and embedded files from being deleted on new conversation
* feat(OpenAI/Anthropic/Google): basic RAG support
* fix: delete `resendFiles` only when true (the default)
* refactor(RAG): update endpoints and pass JWT
* fix(resendFiles): default values
* fix(context/processFile): query unique ids only
* feat: rag-api.yaml
* feat: improved upload UX for longer uploads
* chore: await embed call and catch embedding errors
* refactor: store augmentedPrompt in Client
* refactor(processFileUpload): throw error if not an assistant file upload
* fix(useFileHandling): handle empty mimetype issue for markdown files
* chore: necessary compose file changes
Parent: af347cccde
Commit: f7761df52c
38 changed files with 683 additions and 261 deletions
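
The upload → embed → retrieve → augment flow those messages describe can be pictured with a short sketch. This is purely hypothetical: neither `RetrievedChunk` nor `buildAugmentedPrompt` is asserted to match the actual `createContextHandlers` implementation; only `augmentedPrompt` being stored on the client comes from the commit message.

// Hypothetical sketch of the described RAG flow: retrieved file chunks are
// folded into an augmented prompt that the client stores alongside the message.
type RetrievedChunk = { filename: string; text: string };

function buildAugmentedPrompt(userMessage: string, chunks: RetrievedChunk[]): string {
  if (chunks.length === 0) {
    return userMessage; // no successful RAG results; send the message unchanged
  }
  const context = chunks
    .map((chunk) => `From "${chunk.filename}":\n${chunk.text}`)
    .join('\n\n');
  return `Use the following file context to answer.\n\n${context}\n\n${userMessage}`;
}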
@@ -312,6 +312,7 @@ export interface ExtendedFile {
   progress: number;
   source?: FileSources;
   attached?: boolean;
+  embedded?: boolean;
 }

 export type ContextType = { navVisible: boolean; setNavVisible: (visible: boolean) => void };
@@ -2,6 +2,7 @@ import type { TFile } from 'librechat-data-provider';
 import type { ExtendedFile } from '~/common';
 import FileIcon from '~/components/svg/Files/FileIcon';
 import ProgressCircle from './ProgressCircle';
+import { useProgress } from '~/hooks';
 import { cn } from '~/utils';

 const FilePreview = ({

@@ -19,7 +20,7 @@ const FilePreview = ({
 }) => {
   const radius = 55; // Radius of the SVG circle
   const circumference = 2 * Math.PI * radius;
-  const progress = file?.['progress'] ?? 1;
+  const progress = useProgress(file?.['progress'] ?? 1, 0.001);

   // Calculate the offset based on the loading progress
   const offset = circumference - progress * circumference;
@@ -34,11 +34,12 @@ export default function Presentation({
   const filesToDelete = localStorage.getItem('filesToDelete');
   const map = JSON.parse(filesToDelete ?? '{}') as Record<string, ExtendedFile>;
   const files = Object.values(map)
-    .filter((file) => file.filepath)
+    .filter((file) => file.filepath && file.source && !file.embedded && file.temp_file_id)
     .map((file) => ({
       file_id: file.file_id,
       filepath: file.filepath as string,
       source: file.source as FileSources,
+      embedded: !!file.embedded,
     }));

   if (files.length === 0) {
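
The tightened filter above (repeated in useNewConvo further down) can be read as a single predicate. The helper below is hypothetical, written only to name the rule; the field list comes from the diff:

import type { ExtendedFile } from '~/common';

// Hypothetical helper: only temporary, non-embedded uploads with a known
// storage source are cleaned up when a conversation is discarded; embedded
// (RAG-indexed) files survive so retrieval keeps working.
function isDisposableUpload(file: ExtendedFile): boolean {
  return Boolean(file.filepath && file.source && !file.embedded && file.temp_file_id);
}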
@@ -21,16 +21,8 @@ export default function Settings({ conversation, setOption, models, readonly }:
   if (!conversation) {
     return null;
   }
-  const {
-    model,
-    modelLabel,
-    promptPrefix,
-    temperature,
-    topP,
-    topK,
-    maxOutputTokens,
-    resendImages,
-  } = conversation;
+  const { model, modelLabel, promptPrefix, temperature, topP, topK, maxOutputTokens, resendFiles } =
+    conversation;

   const setModel = setOption('model');
   const setModelLabel = setOption('modelLabel');

@@ -39,7 +31,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
   const setTopP = setOption('topP');
   const setTopK = setOption('topK');
   const setMaxOutputTokens = setOption('maxOutputTokens');
-  const setResendImages = setOption('resendImages');
+  const setResendFiles = setOption('resendFiles');

   return (
     <div className="grid grid-cols-5 gap-6">
@@ -257,13 +249,13 @@ export default function Settings({ conversation, setOption, models, readonly }:
       <HoverCard openDelay={500}>
         <HoverCardTrigger className="grid w-full">
           <div className="flex justify-between">
-            <Label htmlFor="resend-images" className="text-left text-sm font-medium">
-              {localize('com_endpoint_plug_resend_images')}{' '}
+            <Label htmlFor="resend-files" className="text-left text-sm font-medium">
+              {localize('com_endpoint_plug_resend_files')}{' '}
             </Label>
             <Switch
-              id="resend-images"
-              checked={resendImages ?? false}
-              onCheckedChange={(checked: boolean) => setResendImages(checked)}
+              id="resend-files"
+              checked={resendFiles ?? true}
+              onCheckedChange={(checked: boolean) => setResendFiles(checked)}
               disabled={readonly}
               className="flex"
             />
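
Note that the default flips along with the rename: the stored value is undefined until the user touches the toggle, so the `??` fallback decides the effective default. A minimal illustration:

// The conversation has no stored value until the user flips the switch:
const legacyValue: boolean | undefined = undefined;
console.log(legacyValue ?? false); // false — resendImages was off by default
console.log(legacyValue ?? true); // true — resendFiles is on by default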
@@ -28,7 +28,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
     top_p: topP,
     frequency_penalty: freqP,
     presence_penalty: presP,
-    resendImages,
+    resendFiles,
     imageDetail,
   } = conversation ?? {};
   const [setChatGptLabel, chatGptLabelValue] = useDebouncedInput({

@@ -67,7 +67,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
   }

   const setModel = setOption('model');
-  const setResendImages = setOption('resendImages');
+  const setResendFiles = setOption('resendFiles');
   const setImageDetail = setOption('imageDetail');

   const optionEndpoint = endpointType ?? endpoint;
@@ -283,10 +283,10 @@ export default function Settings({ conversation, setOption, models, readonly }:
       <div className="w-full">
         <div className="mb-2 flex w-full justify-between gap-2">
           <label
-            htmlFor="resend-images"
+            htmlFor="resend-files"
             className="text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70 dark:text-gray-50"
           >
-            <small>{localize('com_endpoint_plug_resend_images')}</small>
+            <small>{localize('com_endpoint_plug_resend_files')}</small>
           </label>
           <label
             htmlFor="image-detail-value"

@@ -310,9 +310,9 @@ export default function Settings({ conversation, setOption, models, readonly }:
           <HoverCard openDelay={500}>
             <HoverCardTrigger>
               <Switch
-                id="resend-images"
-                checked={resendImages ?? false}
-                onCheckedChange={(checked: boolean) => setResendImages(checked)}
+                id="resend-files"
+                checked={resendFiles ?? true}
+                onCheckedChange={(checked: boolean) => setResendFiles(checked)}
                 disabled={readonly}
                 className="flex"
               />
@ -15,7 +15,7 @@ const openAI = {
|
|||
topp: 'com_endpoint_openai_topp',
|
||||
freq: 'com_endpoint_openai_freq',
|
||||
pres: 'com_endpoint_openai_pres',
|
||||
resend: 'com_endpoint_openai_resend',
|
||||
resend: 'com_endpoint_openai_resend_files',
|
||||
detail: 'com_endpoint_openai_detail',
|
||||
};
|
||||
|
||||
|
|
|
|||
client/src/hooks/Files/useDelayedUploadToast.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
+import { useState } from 'react';
+import { useToastContext } from '~/Providers/ToastContext';
+import useLocalize from '~/hooks/useLocalize';
+
+export const useDelayedUploadToast = () => {
+  const localize = useLocalize();
+  const { showToast } = useToastContext();
+  const [uploadTimers, setUploadTimers] = useState({});
+
+  const startUploadTimer = (fileId: string, fileName: string) => {
+    const timer = setTimeout(() => {
+      const message = localize('com_ui_upload_delay', fileName);
+      showToast({
+        message,
+        status: 'warning',
+        duration: 7000,
+      });
+    }, 3000); // 3 seconds delay
+
+    setUploadTimers((prev) => ({ ...prev, [fileId]: timer }));
+  };
+
+  const clearUploadTimer = (fileId: string) => {
+    if (uploadTimers[fileId]) {
+      clearTimeout(uploadTimers[fileId]);
+      setUploadTimers((prev) => {
+        const { [fileId]: _, ...rest } = prev as Record<string, unknown>;
+        return rest;
+      });
+    }
+  };
+
+  return { startUploadTimer, clearUploadTimer };
+};
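
The hook is consumed by useFileHandling in the hunks below: the timer starts when a file is submitted and is cleared in both the success and error callbacks. A hypothetical wrapper (not part of the diff) sketching that lifecycle:

import { useDelayedUploadToast } from '~/hooks/Files/useDelayedUploadToast';

// Hypothetical wrapper: warn the user if an upload (and its embedding step)
// takes longer than the 3-second threshold, then cancel the pending toast.
function useTimedUpload(upload: (file: File, fileId: string) => Promise<void>) {
  const { startUploadTimer, clearUploadTimer } = useDelayedUploadToast();

  return async (file: File, fileId: string) => {
    startUploadTimer(fileId, file.name || 'File'); // toast fires if > 3s elapse
    try {
      await upload(file, fileId);
    } finally {
      clearUploadTimer(fileId); // clear on success or error alike
    }
  };
}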
@@ -48,6 +48,7 @@ const useFileDeletion = ({
       temp_file_id = '',
       filepath = '',
       source = FileSources.local,
+      embedded,
       attached,
     } = _file as TFile & { attached?: boolean };

@@ -58,6 +59,7 @@ const useFileDeletion = ({
     }
     const file: BatchFile = {
       file_id,
+      embedded,
       filepath,
       source,
     };
@@ -89,12 +91,13 @@ const useFileDeletion = ({
   const deleteFiles = useCallback(
     ({ files, setFiles }: { files: ExtendedFile[] | TFile[]; setFiles?: FileMapSetter }) => {
       const batchFiles = files.map((_file) => {
-        const { file_id, filepath = '', source = FileSources.local } = _file;
+        const { file_id, embedded, filepath = '', source = FileSources.local } = _file;

         return {
-          source,
           file_id,
           filepath,
+          source,
+          embedded,
         };
       });

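
The entries assembled here are what the batch delete endpoint receives. A plausible shape inferred from this hunk — BatchFile itself is defined in librechat-data-provider, so treat this as an illustration rather than the authoritative type:

import { FileSources } from 'librechat-data-provider';

// Illustrative only: field list inferred from the hunk above.
type BatchFile = {
  file_id: string;
  filepath: string;
  source: FileSources;
  embedded?: boolean; // lets the server also remove the file's RAG embeddings
};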
@@ -9,6 +9,7 @@ import {
 } from 'librechat-data-provider';
 import type { ExtendedFile, FileSetter } from '~/common';
 import { useUploadFileMutation, useGetFileConfig } from '~/data-provider';
+import { useDelayedUploadToast } from './useDelayedUploadToast';
 import { useToastContext } from '~/Providers/ToastContext';
 import { useChatContext } from '~/Providers/ChatContext';
 import useUpdateFiles from './useUpdateFiles';

@@ -24,6 +25,7 @@ type UseFileHandling = {
 const useFileHandling = (params?: UseFileHandling) => {
   const { showToast } = useToastContext();
   const [errors, setErrors] = useState<string[]>([]);
+  const { startUploadTimer, clearUploadTimer } = useDelayedUploadToast();
   const { files, setFiles, setFilesLoading, conversation } = useChatContext();
   const setError = (error: string) => setErrors((prevErrors) => [...prevErrors, error]);
   const { addFile, replaceFile, updateFileById, deleteFileById } = useUpdateFiles(
@@ -72,6 +74,7 @@ const useFileHandling = (params?: UseFileHandling) => {

   const uploadFile = useUploadFileMutation({
     onSuccess: (data) => {
+      clearUploadTimer(data.temp_file_id);
       console.log('upload success', data);
       updateFileById(
         data.temp_file_id,

@@ -95,6 +98,7 @@ const useFileHandling = (params?: UseFileHandling) => {
           width: data.width,
           filename: data.filename,
           source: data.source,
+          embedded: data.embedded,
         },
         params?.additionalMetadata?.assistant_id ? true : false,
       );

@@ -103,6 +107,7 @@ const useFileHandling = (params?: UseFileHandling) => {
     onError: (error, body) => {
       console.log('upload error', error);
       const file_id = body.get('file_id');
+      clearUploadTimer(file_id as string);
       deleteFileById(file_id as string);
       setError(
         (error as { response: { data: { message?: string } } })?.response?.data?.message ??

@@ -117,6 +122,8 @@ const useFileHandling = (params?: UseFileHandling) => {
       return;
     }

+    startUploadTimer(extendedFile.file_id, extendedFile.file?.name || 'File');
+
     const formData = new FormData();
     formData.append('file', extendedFile.file as File);
     formData.append('file_id', extendedFile.file_id);
@@ -159,7 +166,27 @@ const useFileHandling = (params?: UseFileHandling) => {
     }

     for (let i = 0; i < fileList.length; i++) {
-      const originalFile = fileList[i];
+      let originalFile = fileList[i];
+      let fileType = originalFile.type;
+
+      // Infer MIME type for Markdown files when the type is empty
+      if (!fileType && originalFile.name.endsWith('.md')) {
+        fileType = 'text/markdown';
+      }
+
+      // Check if the file type is still empty after the extension check
+      if (!fileType) {
+        setError('Unable to determine file type for: ' + originalFile.name);
+        return false;
+      }
+
+      // Replace empty type with inferred type
+      if (originalFile.type !== fileType) {
+        const newFile = new File([originalFile], originalFile.name, { type: fileType });
+        originalFile = newFile;
+        fileList[i] = newFile;
+      }
+
       if (!checkType(originalFile.type, supportedMimeTypes)) {
         console.log(originalFile);
         setError('Currently, unsupported file type: ' + originalFile.type);
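
The same fallback can be written as a standalone function; this helper is hypothetical and only restates the logic added above. Constructing a new File is required because File.type is read-only:

// Hypothetical standalone form of the MIME fallback above. Browsers often
// report an empty type for .md files, which previously broke upload validation.
function withInferredType(file: File): File {
  let fileType = file.type;
  if (!fileType && file.name.endsWith('.md')) {
    fileType = 'text/markdown';
  }
  if (!fileType) {
    throw new Error('Unable to determine file type for: ' + file.name);
  }
  // File.type is read-only, so a new File must be constructed to change it
  return file.type === fileType ? file : new File([file], file.name, { type: fileType });
}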
@@ -1,6 +1,7 @@
 import { useState, useEffect } from 'react';

-export default function useProgress(initialProgress = 0.01) {
+export default function useProgress(initialProgress = 0.01, increment = 0.007) {
+  const [incrementValue] = useState(increment);
   const [progress, setProgress] = useState(initialProgress);

   useEffect(() => {

@@ -20,7 +21,7 @@ export default function useProgress(initialProgress = 0.01) {
         clearInterval(timer);
         return 1;
       }
-      return Math.min(prevProgress + 0.007, 0.95);
+      return Math.min(prevProgress + incrementValue, 0.95);
     });
   }, 200);
 }

@@ -29,7 +30,7 @@ export default function useProgress(initialProgress = 0.01) {
       clearInterval(timer);
       clearTimeout(timeout);
     };
-  }, [progress, initialProgress]);
+  }, [progress, initialProgress, incrementValue]);

   return progress;
 }
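
With the new increment parameter, callers control how fast the simulated progress creeps: FilePreview above passes 0.001 so that long embedding uploads inch toward the 0.95 cap instead of racing ahead of the real upload. A minimal hypothetical consumer:

import { useProgress } from '~/hooks';

// Hypothetical consumer: the hook ticks every 200ms by `increment`, capped at
// 0.95, until the real progress value reaches 1.
function UploadBar({ initial }: { initial: number }) {
  const progress = useProgress(initial, 0.001);
  return <progress value={progress} max={1} />;
}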
@@ -165,9 +165,10 @@ const useNewConvo = (index = 0) => {

     if (conversation.conversationId === 'new' && !modelsData) {
       const filesToDelete = Array.from(files.values())
-        .filter((file) => file.filepath && file.source)
+        .filter((file) => file.filepath && file.source && !file.embedded && file.temp_file_id)
         .map((file) => ({
           file_id: file.file_id,
+          embedded: !!file.embedded,
           filepath: file.filepath as string,
           source: file.source as FileSources, // Ensure that the source is of type FileSources
         }));
@@ -102,6 +102,8 @@ export default {
   com_ui_preview: 'Preview',
   com_ui_upload: 'Upload',
   com_ui_connect: 'Connect',
+  com_ui_upload_delay:
+    'Uploading "{0}" is taking more time than anticipated. Please wait while the file finishes indexing for retrieval.',
   com_ui_privacy_policy: 'Privacy policy',
   com_ui_terms_of_service: 'Terms of service',
   com_auth_error_login:

@@ -218,6 +220,8 @@ export default {
     'Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model\'s likelihood to talk about new topics.',
   com_endpoint_openai_resend:
     'Resend all previously attached images. Note: this can significantly increase token cost and you may experience errors with many image attachments.',
+  com_endpoint_openai_resend_files:
+    'Resend all previously attached files. Note: this will increase token cost and you may experience errors with many attachments.',
   com_endpoint_openai_detail:
     'The resolution for Vision requests. "Low" is cheaper and faster, "High" is more detailed and expensive, and "Auto" will automatically choose between the two based on the image resolution.',
   com_endpoint_openai_custom_name_placeholder: 'Set a custom name for ChatGPT',

@@ -235,6 +239,7 @@ export default {
   com_endpoint_frequency_penalty: 'Frequency Penalty',
   com_endpoint_presence_penalty: 'Presence Penalty',
   com_endpoint_plug_use_functions: 'Use Functions',
+  com_endpoint_plug_resend_files: 'Resend Files',
   com_endpoint_plug_resend_images: 'Resend Images',
   com_endpoint_plug_image_detail: 'Image Detail',
   com_endpoint_plug_skip_completion: 'Skip Completion',
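
The new com_ui_upload_delay string carries a {0} placeholder that localize fills with the file name (see useDelayedUploadToast above). A sketch of that substitution; the real useLocalize implementation may differ:

// Hypothetical stand-in for the substitution performed by useLocalize:
function format(template: string, ...values: string[]): string {
  return values.reduce((out, value, i) => out.replaceAll(`{${i}}`, value), template);
}

// format(com_ui_upload_delay, 'report.pdf')
// -> 'Uploading "report.pdf" is taking more time than anticipated. ...'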
@@ -26,7 +26,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0125',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'A Long Story',
     top_p: 1,

@@ -51,7 +51,7 @@ export const convoData: ConversationData = {
     model: 'meta-llama/llama-2-13b-chat',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'How Are You Doing?',
     top_p: 1,

@@ -69,7 +69,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'A Long Story',
     top_p: 1,

@@ -94,7 +94,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Write Einstein\'s Famous Equation in LaTeX',
     top_p: 1,

@@ -120,7 +120,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Fibonacci Solver in Python',
     top_p: 1,

@@ -153,7 +153,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'test',
     top_p: 1,

@@ -175,7 +175,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0125',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How can I assist you today?',
     top_p: 1,

@@ -205,7 +205,7 @@ export const convoData: ConversationData = {
     model: 'gpt-4-0125-preview',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'test',
     top_p: 1,

@@ -227,7 +227,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How can I assist you today?',
     top_p: 1,

@@ -263,7 +263,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'test',
     top_p: 1,

@@ -286,7 +286,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello, How Can I Help You?',
     top_p: 1,

@@ -304,7 +304,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello there! How may I assist you today?',
     top_p: 1,

@@ -322,7 +322,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How may I assist you today?',
     top_p: 1,

@@ -340,7 +340,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How can I assist you today?',
     top_p: 1,

@@ -358,7 +358,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How may I assist you today?',
     top_p: 1,

@@ -376,7 +376,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How can I assist you today?',
     top_p: 1,

@@ -394,7 +394,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How can I assist you today?',
     top_p: 1,

@@ -412,7 +412,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How can I assist you today?',
     top_p: 1,

@@ -430,7 +430,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How can I assist you today?',
     top_p: 1,

@@ -448,7 +448,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How can I assist you today?',
     top_p: 1,

@@ -466,7 +466,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How can I assist you today?',
     top_p: 1,

@@ -484,7 +484,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How can I assist you today?',
     top_p: 1,

@@ -502,7 +502,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hello! How can I assist you today?',
     top_p: 1,

@@ -525,7 +525,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'test 2',
     top_p: 1,

@@ -550,7 +550,7 @@ export const convoData: ConversationData = {
     model: 'gpt-3.5-turbo-0301',
     presence_penalty: 0,
     promptPrefix: null,
-    resendImages: false,
+    resendFiles: false,
     temperature: 1,
     title: 'Hi there! How can I assist you today?',
     top_p: 1,