🚧 chore: merge latest dev build (#4288)
* fix: agent initialization, add `collectedUsage` handling
* style: improve side panel styling
* refactor(loadAgent): optimize agent project ID retrieval order
* feat: code execution
* fix: typing issues
* feat: ExecuteCode content part
* refactor: use local state for default collapsed state of analysis content parts
* fix: code parsing in ExecuteCode component
* chore: bump agents package, export loadAuthValues
* refactor: update handleTools.js to use EnvVar for code execution tool authentication
* WIP
* feat: download code outputs
* fix(useEventHandlers): type issues
* feat: backend handling for code outputs
* refactor: remove console.log statement in Part.tsx
* refactor: add attachments to TMessage/messageSchema
* WIP: prelim handling for code outputs
* feat: attachments rendering
* refactor: improve attachments rendering
* fix: attachments, nullish edge case, handle attachments from event stream, bump agents package
* fix: filename download
* fix: tool assignment for 'run code' on agent creation
* fix: image handling by adding attachments
* refactor: prevent agent creation without provider/model
* refactor: remove unnecessary space in agent creation success message
* refactor: select first model if selecting provider from empty on form
* fix: agent avatar bug
* fix: `defaultAgentFormValues` causing boolean typing issue and TypeError
* fix: capabilities counting as tools, causing their duplication
* fix: formatted-messages edge case where consecutive text content parts, with the latter carrying tool_call_ids, would create consecutive AI messages; also, content could not be an array for tool_use messages (Anthropic limitation)
* chore: bump @librechat/agents dependency to version 1.6.9
* feat: bedrock agents
* feat: new Agents icon
* feat: agent titling
* feat: agent landing
* refactor: allow sharing agent globally only if user is admin or author
* feat: initial AgentPanelSkeleton
* feat: AgentPanelSkeleton
* feat: collaborative agents
* chore: add potential authorName as part of schema
* chore: remove unnecessary console.log statement
* WIP: agent model parameters
* chore: ToolsDialog typing and tool-related localization changes
* refactor: update tool instance type (latest langchain class), and rename google tool to 'google' proper
* chore: add back tools
* feat: agent knowledge files upload
* refactor: better verbiage for disabled knowledge
* chore: debug logs for file deletions
* chore: debug logs for file deletions
* feat: upload/delete agent knowledge/file-search files
* feat: file search UI for agents
* feat: first pass, file search tool
* chore: update default agent capabilities and info
parent f33e75e2ee
commit ad74350036

123 changed files with 3611 additions and 1541 deletions
client/src/data-provider/Files/index.ts (new file, +1)
@@ -0,0 +1 @@
+export * from './queries';
client/src/data-provider/Files/queries.ts (new file, +84)
@@ -0,0 +1,84 @@
+import { QueryKeys, dataService } from 'librechat-data-provider';
+import { useQuery, useQueryClient } from '@tanstack/react-query';
+import type { QueryObserverResult, UseQueryOptions } from '@tanstack/react-query';
+import type t from 'librechat-data-provider';
+import { addFileToCache } from '~/utils';
+
+export const useGetFiles = <TData = t.TFile[] | boolean>(
+  config?: UseQueryOptions<t.TFile[], unknown, TData>,
+): QueryObserverResult<TData, unknown> => {
+  return useQuery<t.TFile[], unknown, TData>([QueryKeys.files], () => dataService.getFiles(), {
+    refetchOnWindowFocus: false,
+    refetchOnReconnect: false,
+    refetchOnMount: false,
+    ...config,
+  });
+};
+
+export const useGetFileConfig = <TData = t.FileConfig>(
+  config?: UseQueryOptions<t.FileConfig, unknown, TData>,
+): QueryObserverResult<TData, unknown> => {
+  return useQuery<t.FileConfig, unknown, TData>(
+    [QueryKeys.fileConfig],
+    () => dataService.getFileConfig(),
+    {
+      refetchOnWindowFocus: false,
+      refetchOnReconnect: false,
+      refetchOnMount: false,
+      ...config,
+    },
+  );
+};
+
+export const useFileDownload = (userId?: string, file_id?: string): QueryObserverResult<string> => {
+  const queryClient = useQueryClient();
+  return useQuery(
+    [QueryKeys.fileDownload, file_id],
+    async () => {
+      if (!userId || !file_id) {
+        console.warn('No user ID provided for file download');
+        return;
+      }
+      const response = await dataService.getFileDownload(userId, file_id);
+      const blob = response.data;
+      const downloadURL = window.URL.createObjectURL(blob);
+      try {
+        const metadata: t.TFile | undefined = JSON.parse(response.headers['x-file-metadata']);
+        if (!metadata) {
+          console.warn('No metadata found for file download', response.headers);
+          return downloadURL;
+        }
+
+        addFileToCache(queryClient, metadata);
+      } catch (e) {
+        console.error('Error parsing file metadata, skipped updating file query cache', e);
+      }
+
+      return downloadURL;
+    },
+    {
+      enabled: false,
+      retry: false,
+    },
+  );
+};
+
+export const useCodeOutputDownload = (url = ''): QueryObserverResult<string> => {
+  return useQuery(
+    [QueryKeys.fileDownload, url],
+    async () => {
+      if (!url) {
+        console.warn('No user ID provided for file download');
+        return;
+      }
+      const response = await dataService.getCodeOutputDownload(url);
+      const blob = response.data;
+      const downloadURL = window.URL.createObjectURL(blob);
+      return downloadURL;
+    },
+    {
+      enabled: false,
+      retry: false,
+    },
+  );
+};
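Both download hooks above are registered with `enabled: false`, so nothing fetches until a component calls `refetch()` explicitly. A hedged usage sketch follows; the component, prop names, and the anchor-click pattern are illustrative and not part of the diff, and the import path assumes the new barrel export is re-exported from the data-provider index:

// Illustrative sketch only — assumes the surrounding LibreChat React client setup.
import React from 'react';
import { useFileDownload } from '~/data-provider';

export default function DownloadFileButton({
  userId,
  fileId,
  filename,
}: {
  userId: string;
  fileId: string;
  filename: string;
}) {
  // `enabled: false` in useFileDownload means this query is inert until refetch() is called.
  const { refetch } = useFileDownload(userId, fileId);

  const handleDownload = async () => {
    const { data: downloadURL } = await refetch();
    if (!downloadURL) {
      return;
    }
    // Turn the object URL into a browser download, then release it.
    const link = document.createElement('a');
    link.href = downloadURL;
    link.download = filename;
    link.click();
    window.URL.revokeObjectURL(downloadURL);
  };

  return <button onClick={handleDownload}>Download {filename}</button>;
}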
@@ -1,3 +1,4 @@
+export * from './Files';
 export * from './connection';
 export * from './mutations';
 export * from './prompts';
@@ -9,7 +9,7 @@ import {
 import { useSetRecoilState } from 'recoil';
 import { useMutation, useQueryClient } from '@tanstack/react-query';
 import { dataService, MutationKeys, QueryKeys, defaultOrderQuery } from 'librechat-data-provider';
-import type t from 'librechat-data-provider';
+import type * as t from 'librechat-data-provider';
 import type { InfiniteData, UseMutationResult } from '@tanstack/react-query';
 import useUpdateTagsInConvo from '~/hooks/Conversations/useUpdateTagsInConvo';
 import { updateConversationTag } from '~/utils/conversationTags';
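A general TypeScript aside on the one-line import change above (not something the diff itself states): a namespace type import binds every exported type of the module under a single alias, so member access such as `t.TFile` or `t.Agent` resolves against the module's named type exports rather than a default export.

// Illustrative only — the module name is taken from the diff; the aliases are arbitrary.
import type * as t from 'librechat-data-provider';

export type FileLike = t.TFile;   // resolves against the module's named type exports
export type AgentLike = t.Agent;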
@@ -537,32 +537,33 @@ export const useDeleteConversationMutation = (
     (payload: t.TDeleteConversationRequest) => dataService.deleteConversation(payload),
     {
       onSuccess: (_data, vars, context) => {
-        if (!vars.conversationId) {
+        const conversationId = vars.conversationId ?? '';
+        if (!conversationId) {
           return;
         }
 
-        const handleDelete = (convoData) => {
+        const handleDelete = (convoData: t.ConversationData | undefined) => {
           if (!convoData) {
             return convoData;
           }
           return normalizeData(
-            deleteConversation(convoData, vars.conversationId as string),
+            deleteConversation(convoData, conversationId),
             'conversations',
-            convoData.pages[0].pageSize,
+            Number(convoData.pages[0].pageSize),
           );
         };
 
-        queryClient.setQueryData([QueryKeys.conversation, vars.conversationId], null);
+        queryClient.setQueryData([QueryKeys.conversation, conversationId], null);
         queryClient.setQueryData<t.ConversationData>([QueryKeys.allConversations], handleDelete);
         queryClient.setQueryData<t.ConversationData>(
           [QueryKeys.archivedConversations],
           handleDelete,
         );
         const current = queryClient.getQueryData<t.ConversationData>([QueryKeys.allConversations]);
-        refetch({ refetchPage: (page, index) => index === (current?.pages.length || 1) - 1 });
+        refetch({ refetchPage: (page, index) => index === (current?.pages.length ?? 1) - 1 });
         onSuccess?.(_data, vars, context);
       },
-      ...(_options || {}),
+      ..._options,
     },
   );
 };
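Two of the small changes above are behavioral rather than cosmetic: `Number(pageSize)` guards against the page size arriving as a string, and `??` only falls back when the value is null or undefined, whereas `||` also swallows legitimate falsy values such as 0. A standalone illustration (not from the diff):

// Illustrative only: how `||` and `??` differ for the page-count fallback above.
const emptyLength = 0;

const withOr = emptyLength || 1;      // 1 — zero is treated as "missing"
const withNullish = emptyLength ?? 1; // 0 — zero is preserved

// And why Number() matters if pageSize was deserialized as a string:
const pageSize: unknown = '25';
console.log(Number(pageSize), withOr, withNullish); // 25 0 ... instead of passing '25' through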
@@ -593,7 +594,7 @@ export const useForkConvoMutation = (
       );
       onSuccess?.(data, vars, context);
     },
-    ...(_options || {}),
+    ..._options,
   });
 };
 
@@ -642,7 +643,7 @@ export const useUploadFileMutation = (
 
       return dataService.uploadFile(body);
     },
-    ...(options || {}),
+    ...options,
     onSuccess: (data, formData, context) => {
       queryClient.setQueryData<t.TFile[] | undefined>([QueryKeys.files], (_files) => [
         data,
@@ -650,11 +651,44 @@ export const useUploadFileMutation = (
       ]);
 
       const endpoint = formData.get('endpoint');
-      const assistant_id = formData.get('assistant_id');
       const message_file = formData.get('message_file');
-      const tool_resource = formData.get('tool_resource');
+      const agent_id = (formData.get('agent_id') as string | undefined) ?? '';
+      const assistant_id = (formData.get('assistant_id') as string | undefined) ?? '';
+      const tool_resource = (formData.get('tool_resource') as string | undefined) ?? '';
 
-      if (!assistant_id || message_file === 'true') {
+      if (message_file === 'true') {
         onSuccess?.(data, formData, context);
         return;
       }
+
+      if (agent_id && tool_resource) {
+        queryClient.setQueryData<t.Agent>([QueryKeys.agent, agent_id], (agent) => {
+          if (!agent) {
+            return agent;
+          }
+
+          const update = {};
+          const prevResources = agent.tool_resources ?? {};
+          const prevResource: t.ExecuteCodeResource | t.AgentFileSearchResource = agent
+            .tool_resources?.[tool_resource] ?? {
+            file_ids: [],
+          };
+          if (!prevResource.file_ids) {
+            prevResource.file_ids = [];
+          }
+          prevResource.file_ids.push(data.file_id);
+          update['tool_resources'] = {
+            ...prevResources,
+            [tool_resource]: prevResource,
+          };
+          return {
+            ...agent,
+            ...update,
+          };
+        });
+      }
+
+      if (!assistant_id) {
+        onSuccess?.(data, formData, context);
+        return;
+      }
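The cache updater above only appends the uploaded `file_id` to the matching tool-resource bucket on the agent. A sketch of the shape it produces follows; the resource key and id are made-up examples, and 'file_search' is assumed to be one of the accepted `tool_resource` values:

// Illustrative data only — not part of the commit.
const agentBefore = {
  id: 'agent_123',
  tool_resources: {} as Record<string, { file_ids?: string[] }>,
};

// After uploading a file with formData tool_resource = 'file_search':
const agentAfter = {
  ...agentBefore,
  tool_resources: {
    ...agentBefore.tool_resources,
    file_search: { file_ids: ['file-abc123'] }, // uploaded file_id appended
  },
};

console.log(agentAfter.tool_resources.file_search.file_ids); // ['file-abc123']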
@@ -679,13 +713,16 @@ export const useUploadFileMutation = (
       }
       if (tool_resource === EToolResources.code_interpreter) {
         const prevResources = assistant.tool_resources ?? {};
-        const prevResource = assistant.tool_resources?.[tool_resource as string] ?? {
+        const prevResource = assistant.tool_resources?.[tool_resource] ?? {
           file_ids: [],
         };
+        if (!prevResource.file_ids) {
+          prevResource.file_ids = [];
+        }
         prevResource.file_ids.push(data.file_id);
         update['tool_resources'] = {
           ...prevResources,
-          [tool_resource as string]: prevResource,
+          [tool_resource]: prevResource,
         };
       }
       return {
@@ -712,9 +749,8 @@ export const useDeleteFilesMutation = (
   const queryClient = useQueryClient();
   const { onSuccess, ...options } = _options || {};
   return useMutation([MutationKeys.fileDelete], {
-    mutationFn: (body: t.DeleteFilesBody) =>
-      dataService.deleteFiles(body.files, body.assistant_id, body.tool_resource),
-    ...(options || {}),
+    mutationFn: (body: t.DeleteFilesBody) => dataService.deleteFiles(body),
+    ...options,
     onSuccess: (data, ...args) => {
       queryClient.setQueryData<t.TFile[] | undefined>([QueryKeys.files], (cachefiles) => {
         const { files: filesDeleted } = args[0];
@@ -1228,6 +1264,8 @@ export const useUpdateAgentMutation = (
           return agent;
         }),
       });
+
+      queryClient.setQueryData<t.Agent>([QueryKeys.agent, variables.agent_id], updatedAgent);
       return options?.onSuccess?.(updatedAgent, variables, context);
     },
   },
@@ -10,15 +10,12 @@ import type {
   UseInfiniteQueryOptions,
   QueryObserverResult,
   UseQueryOptions,
-  UseQueryResult,
 } from '@tanstack/react-query';
 import type t from 'librechat-data-provider';
 import type {
   Action,
   TPreset,
-  TFile,
   TPlugin,
-  FileConfig,
   ConversationListResponse,
   ConversationListParams,
   Assistant,
@@ -32,36 +29,8 @@ import type {
   TCheckUserKeyResponse,
   SharedLinkListParams,
   SharedLinksResponse,
   TUserTermsResponse,
   TAcceptTermsResponse,
 } from 'librechat-data-provider';
-import { findPageForConversation, addFileToCache } from '~/utils';
-
-export const useGetFiles = <TData = TFile[] | boolean>(
-  config?: UseQueryOptions<TFile[], unknown, TData>,
-): QueryObserverResult<TData, unknown> => {
-  return useQuery<TFile[], unknown, TData>([QueryKeys.files], () => dataService.getFiles(), {
-    refetchOnWindowFocus: false,
-    refetchOnReconnect: false,
-    refetchOnMount: false,
-    ...config,
-  });
-};
-
-export const useGetFileConfig = <TData = FileConfig>(
-  config?: UseQueryOptions<FileConfig, unknown, TData>,
-): QueryObserverResult<TData, unknown> => {
-  return useQuery<FileConfig, unknown, TData>(
-    [QueryKeys.fileConfig],
-    () => dataService.getFileConfig(),
-    {
-      refetchOnWindowFocus: false,
-      refetchOnReconnect: false,
-      refetchOnMount: false,
-      ...config,
-    },
-  );
-};
+import { findPageForConversation } from '~/utils';
 
 export const useGetPresetsQuery = (
   config?: UseQueryOptions<TPreset[]>,
@@ -321,7 +290,7 @@ export const useGetAssistantByIdQuery = (
   const queryClient = useQueryClient();
   const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
   const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([QueryKeys.name, endpoint]);
-  const userProvidesKey = !!endpointsConfig?.[endpoint]?.userProvide;
+  const userProvidesKey = endpointsConfig?.[endpoint]?.userProvide ?? false;
   const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
   const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
   const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
@@ -401,39 +370,6 @@ export const useGetAssistantDocsQuery = <TData = AssistantDocument[]>(
   );
 };
 
-export const useFileDownload = (userId?: string, file_id?: string): QueryObserverResult<string> => {
-  const queryClient = useQueryClient();
-  return useQuery(
-    [QueryKeys.fileDownload, file_id],
-    async () => {
-      if (!userId || !file_id) {
-        console.warn('No user ID provided for file download');
-        return;
-      }
-      const response = await dataService.getFileDownload(userId, file_id);
-      const blob = response.data;
-      const downloadURL = window.URL.createObjectURL(blob);
-      try {
-        const metadata: TFile | undefined = JSON.parse(response.headers['x-file-metadata']);
-        if (!metadata) {
-          console.warn('No metadata found for file download', response.headers);
-          return downloadURL;
-        }
-
-        addFileToCache(queryClient, metadata);
-      } catch (e) {
-        console.error('Error parsing file metadata, skipped updating file query cache', e);
-      }
-
-      return downloadURL;
-    },
-    {
-      enabled: false,
-      retry: false,
-    },
-  );
-};
-
 /**
  * AGENTS
  */