🚧 chore: merge latest dev build (#4288)

* fix: agent initialization, add `collectedUsage` handling

* style: improve side panel styling

* refactor(loadAgent): Optimize order of agent project ID retrieval

* feat: code execution

* fix: typing issues

* feat: ExecuteCode content part

* refactor: use local state for default collapsed state of analysis content parts

* fix: code parsing in ExecuteCode component

* chore: bump agents package, export loadAuthValues

* refactor: Update handleTools.js to use EnvVar for code execution tool authentication

* WIP

* feat: download code outputs

* fix(useEventHandlers): type issues

* feat: backend handling for code outputs

* Refactor: Remove console.log statement in Part.tsx

* refactor: add attachments to TMessage/messageSchema

* WIP: prelim handling for code outputs

* feat: attachments rendering

* refactor: improve attachments rendering

* fix: attachments, nullish edge case, handle attachments from event stream, bump agents package

* fix: filename download

* fix: tool assignment for 'run code' on agent creation

* fix: image handling by adding attachments

* refactor: prevent agent creation without provider/model

* refactor: remove unnecessary space in agent creation success message

* refactor: select first model if selecting provider from empty on form

* fix: Agent avatar bug

* fix: `defaultAgentFormValues` causing a boolean typing issue and TypeError

* fix: capabilities counting as tools, causing their duplication

* fix: formatted-messages edge case where consecutive text content parts, with the latter carrying tool_call_ids, would create consecutive AI messages; furthermore, content could not be an array for tool_use messages (an Anthropic limitation)

* chore: bump @librechat/agents dependency to version 1.6.9

* feat: bedrock agents

* feat: new Agents icon

* feat: agent titling

* feat: agent landing

* refactor: allow sharing agent globally only if user is admin or author

* feat: initial AgentPanelSkeleton

* feat: AgentPanelSkeleton

* feat: collaborative agents

* chore: add potential authorName as part of schema

* chore: Remove unnecessary console.log statement

* WIP: agent model parameters

* chore: ToolsDialog typing and tool-related localization changes

* refactor: update tool instance type (latest langchain class), and rename google tool to 'google' proper

* chore: add back tools

* feat: Agent knowledge files upload

* refactor: better verbiage for disabled knowledge

* chore: debug logs for file deletions

* chore: debug logs for file deletions

* feat: upload/delete agent knowledge/file-search files

* feat: file search UI for agents

* feat: first pass, file search tool

* chore: update default agent capabilities and info
Danny Avila 2024-09-30 17:17:57 -04:00 committed by GitHub
parent f33e75e2ee
commit ad74350036
123 changed files with 3611 additions and 1541 deletions

Changed file (diff excerpt): the useFileDeletion hook

@@ -1,12 +1,7 @@
import debounce from 'lodash/debounce';
import { FileSources, EToolResources } from 'librechat-data-provider';
import { FileSources, EToolResources, removeNullishValues } from 'librechat-data-provider';
import { useCallback, useState, useEffect } from 'react';
import type {
BatchFile,
TFile,
DeleteFilesResponse,
DeleteFilesBody,
} from 'librechat-data-provider';
import type * as t from 'librechat-data-provider';
import type { UseMutateAsyncFunction } from '@tanstack/react-query';
import type { ExtendedFile, GenericSetter } from '~/common';
import useSetFilesToDelete from './useSetFilesToDelete';
@@ -15,21 +10,38 @@ type FileMapSetter = GenericSetter<Map<string, ExtendedFile>>;
const useFileDeletion = ({
mutateAsync,
agent_id,
assistant_id,
tool_resource,
}: {
mutateAsync: UseMutateAsyncFunction<DeleteFilesResponse, unknown, DeleteFilesBody, unknown>;
mutateAsync: UseMutateAsyncFunction<t.DeleteFilesResponse, unknown, t.DeleteFilesBody, unknown>;
agent_id?: string;
assistant_id?: string;
tool_resource?: EToolResources;
}) => {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const [_batch, setFileDeleteBatch] = useState<BatchFile[]>([]);
const [_batch, setFileDeleteBatch] = useState<t.BatchFile[]>([]);
const setFilesToDelete = useSetFilesToDelete();
const executeBatchDelete = useCallback(
(filesToDelete: BatchFile[], assistant_id?: string, tool_resource?: EToolResources) => {
console.log('Deleting files:', filesToDelete, assistant_id, tool_resource);
mutateAsync({ files: filesToDelete, assistant_id, tool_resource });
({
filesToDelete,
agent_id,
assistant_id,
tool_resource,
}: {
filesToDelete: t.BatchFile[];
agent_id?: string;
assistant_id?: string;
tool_resource?: EToolResources;
}) => {
const payload = removeNullishValues({
agent_id,
assistant_id,
tool_resource,
});
console.log('Deleting files:', filesToDelete, payload);
mutateAsync({ files: filesToDelete, ...payload });
setFileDeleteBatch([]);
},
[mutateAsync],
@@ -44,22 +56,22 @@ const useFileDeletion = ({
}, [debouncedDelete]);
const deleteFile = useCallback(
({ file: _file, setFiles }: { file: ExtendedFile | TFile; setFiles?: FileMapSetter }) => {
({ file: _file, setFiles }: { file: ExtendedFile | t.TFile; setFiles?: FileMapSetter }) => {
const {
file_id,
temp_file_id = '',
filepath = '',
source = FileSources.local,
embedded,
attached,
} = _file as TFile & { attached?: boolean };
attached = false,
} = _file as t.TFile & { attached?: boolean };
const progress = _file['progress'] ?? 1;
if (progress < 1) {
return;
}
const file: BatchFile = {
const file: t.BatchFile = {
file_id,
embedded,
filepath,
@@ -83,15 +95,20 @@ const useFileDeletion = ({
setFileDeleteBatch((prevBatch) => {
const newBatch = [...prevBatch, file];
debouncedDelete(newBatch, assistant_id, tool_resource);
debouncedDelete({
filesToDelete: newBatch,
agent_id,
assistant_id,
tool_resource,
});
return newBatch;
});
},
[debouncedDelete, setFilesToDelete, assistant_id, tool_resource],
[debouncedDelete, setFilesToDelete, agent_id, assistant_id, tool_resource],
);
const deleteFiles = useCallback(
({ files, setFiles }: { files: ExtendedFile[] | TFile[]; setFiles?: FileMapSetter }) => {
({ files, setFiles }: { files: ExtendedFile[] | t.TFile[]; setFiles?: FileMapSetter }) => {
const batchFiles = files.map((_file) => {
const { file_id, embedded, filepath = '', source = FileSources.local } = _file;
@@ -117,11 +134,15 @@ const useFileDeletion = ({
setFileDeleteBatch((prevBatch) => {
const newBatch = [...prevBatch, ...batchFiles];
debouncedDelete(newBatch, assistant_id);
debouncedDelete({
filesToDelete: newBatch,
agent_id,
assistant_id,
});
return newBatch;
});
},
[debouncedDelete, setFilesToDelete, assistant_id],
[debouncedDelete, setFilesToDelete, agent_id, assistant_id],
);
return { deleteFile, deleteFiles };
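
The core change in this hook: the debounced delete callback now takes a single options object so `agent_id` can travel alongside `assistant_id` and `tool_resource`, and `removeNullishValues` strips whichever IDs are unset before the mutation fires. A minimal sketch of the resulting call shape, using only identifiers visible in the diff; the literal values and the `declare` stub are illustrative:

```ts
import { FileSources, removeNullishValues } from 'librechat-data-provider';
import type * as t from 'librechat-data-provider';

// Illustrative stand-in for the react-query mutation the hook receives.
declare const mutateAsync: (body: t.DeleteFilesBody) => Promise<t.DeleteFilesResponse>;

// A queued batch of files to delete (field names mirror the destructuring in the diff).
const filesToDelete: t.BatchFile[] = [
  { file_id: 'file_123', embedded: false, filepath: '/uploads/file_123', source: FileSources.local },
];

// removeNullishValues drops the undefined IDs, so a delete scoped to an agent
// sends only agent_id, and a plain delete sends neither entity ID.
const payload = removeNullishValues({
  agent_id: 'agent_abc',
  assistant_id: undefined,
  tool_resource: undefined,
});

void mutateAsync({ files: filesToDelete, ...payload });
```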

Changed file (diff excerpt): the useFileHandling hook

@@ -40,6 +40,9 @@ const useFileHandling = (params?: UseFileHandling) => {
params?.fileSetter ?? setFiles,
);
const agent_id = params?.additionalMetadata?.agent_id ?? '';
const assistant_id = params?.additionalMetadata?.assistant_id ?? '';
const { data: fileConfig = defaultFileConfig } = useGetFileConfig({
select: (data) => mergeFileConfig(data),
});
@@ -84,13 +87,17 @@ const useFileHandling = (params?: UseFileHandling) => {
onSuccess: (data) => {
clearUploadTimer(data.temp_file_id);
console.log('upload success', data);
if (agent_id) {
queryClient.refetchQueries([QueryKeys.agent, agent_id]);
return;
}
updateFileById(
data.temp_file_id,
{
progress: 0.9,
filepath: data.filepath,
},
params?.additionalMetadata?.assistant_id ? true : false,
assistant_id ? true : false,
);
setTimeout(() => {
@@ -108,7 +115,7 @@ const useFileHandling = (params?: UseFileHandling) => {
source: data.source,
embedded: data.embedded,
},
params?.additionalMetadata?.assistant_id ? true : false,
assistant_id ? true : false,
);
}, 300);
},
@@ -118,51 +125,45 @@ const useFileHandling = (params?: UseFileHandling) => {
clearUploadTimer(file_id as string);
deleteFileById(file_id as string);
setError(
(error as TError)?.response?.data?.message ?? 'An error occurred while uploading the file.',
(error as TError | undefined)?.response?.data?.message ??
'An error occurred while uploading the file.',
);
},
});
const startUpload = async (extendedFile: ExtendedFile) => {
if (!endpoint) {
setError('An error occurred while uploading the file: Endpoint is undefined');
return;
}
startUploadTimer(extendedFile.file_id, extendedFile.file?.name || 'File', extendedFile.size);
const filename = extendedFile.file?.name ?? 'File';
startUploadTimer(extendedFile.file_id, filename, extendedFile.size);
const formData = new FormData();
formData.append(
'file',
extendedFile.file as File,
encodeURIComponent(extendedFile.file?.name || 'File'),
);
formData.append('file', extendedFile.file as File, encodeURIComponent(filename));
formData.append('file_id', extendedFile.file_id);
if (extendedFile.width) {
formData.append('width', extendedFile.width?.toString());
const width = extendedFile.width ?? 0;
const height = extendedFile.height ?? 0;
if (width) {
formData.append('width', width.toString());
}
if (extendedFile.height) {
formData.append('height', extendedFile.height?.toString());
if (height) {
formData.append('height', height.toString());
}
if (params?.additionalMetadata) {
for (const [key, value] of Object.entries(params.additionalMetadata)) {
for (const [key, value = ''] of Object.entries(params.additionalMetadata)) {
if (value) {
formData.append(key, value);
}
}
}
if (
isAssistantsEndpoint(endpoint) &&
!formData.get('assistant_id') &&
conversation?.assistant_id
) {
const convoAssistantId = conversation?.assistant_id ?? '';
const convoModel = conversation?.model ?? '';
if (isAssistantsEndpoint(endpoint) && !formData.get('assistant_id') && convoAssistantId) {
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
formData.append('version', version);
formData.append('assistant_id', conversation.assistant_id);
formData.append('model', conversation?.model ?? '');
formData.append('assistant_id', convoAssistantId);
formData.append('model', convoModel);
formData.append('message_file', 'true');
}
if (isAssistantsEndpoint(endpoint) && !formData.get('version')) {
@@ -238,7 +239,10 @@ const useFileHandling = (params?: UseFileHandling) => {
(file) =>
`${file.file?.name ?? file.filename}-${file.size}-${file.type?.split('/')[0] ?? 'file'}`,
),
...fileList.map((file) => `${file.name}-${file.size}-${file.type?.split('/')[0] ?? 'file'}`),
...fileList.map(
(file: File | undefined) =>
`${file?.name}-${file?.size}-${file?.type.split('/')[0] ?? 'file'}`,
),
];
const uniqueFilesSet = new Set(combinedFilesInfo);
@@ -300,7 +304,7 @@ const useFileHandling = (params?: UseFileHandling) => {
addFile(extendedFile);
if (originalFile.type?.split('/')[0] === 'image') {
if (originalFile.type.split('/')[0] === 'image') {
loadImage(extendedFile, preview);
continue;
}
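
A behavioral note on the upload handler above: when `additionalMetadata` carries an `agent_id`, a successful upload now refetches the agent query and returns early, skipping the incremental `updateFileById` progress updates used for assistant and regular uploads. A rough sketch of that branch; the wrapper function and its boolean return are illustrative, not part of the hook:

```ts
import { QueryKeys } from 'librechat-data-provider';
import type { QueryClient } from '@tanstack/react-query';

// Sketch: agent-scoped uploads refetch the agent query and bail out early;
// everything else continues into the per-file progress updates.
function handleUploadSuccess(queryClient: QueryClient, agent_id: string): boolean {
  if (agent_id) {
    // Agent knowledge files hang off the agent document, so refresh it and stop.
    void queryClient.refetchQueries([QueryKeys.agent, agent_id]);
    return true; // handled: skip updateFileById progress updates
  }
  return false; // fall through to the assistant/regular upload path
}
```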

Changed file (diff excerpt): the useUpdateFiles hook

@@ -20,11 +20,7 @@ export default function useUpdateFiles(setFiles: FileSetter) {
});
};
const updateFileById = (
fileId: string,
updates: Partial<ExtendedFile>,
isAssistantFile?: boolean,
) => {
const updateFileById = (fileId: string, updates: Partial<ExtendedFile>, isEntityFile = false) => {
setFiles((currentFiles) => {
if (!currentFiles.has(fileId)) {
console.warn(`File with id ${fileId} not found.`);
@@ -38,8 +34,8 @@ export default function useUpdateFiles(setFiles: FileSetter) {
return currentFiles;
}
updatedFiles.set(fileId, { ...currentFile, ...updates });
if (updates['filepath'] && updates['progress'] !== 1 && !isAssistantFile) {
const filepath = updates['filepath'] ?? '';
if (filepath && updates['progress'] !== 1 && !isEntityFile) {
const files = Object.fromEntries(updatedFiles);
setFilesToDelete(files);
}
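
Lastly, `updateFileById` replaces the `isAssistantFile` flag with a more general `isEntityFile` (defaulting to `false`), so agent files can opt out of the delete-tracking side effect the same way assistant files already did. A hedged sketch of call sites after the rename; the `declare` stubs exist only to keep the example self-contained:

```ts
import type { ExtendedFile } from '~/common';

// Stand-ins so the sketch compiles on its own; the real values come from the hooks.
declare function updateFileById(
  fileId: string,
  updates: Partial<ExtendedFile>,
  isEntityFile?: boolean,
): void;
declare const data: { temp_file_id: string; filepath: string };
declare const assistant_id: string;

// Regular upload: flag omitted (defaults to false), so an in-progress file that
// already reports a filepath is queued in filesToDelete for cleanup.
updateFileById(data.temp_file_id, { progress: 0.9, filepath: data.filepath });

// Assistant ("entity") upload: pass true to skip that cleanup tracking.
updateFileById(
  data.temp_file_id,
  { progress: 0.9, filepath: data.filepath },
  Boolean(assistant_id),
);
```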