Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-09-22 08:12:00 +02:00
🧩 feat: Web Search Config Validations & Clipboard Citation Processing (#7530)
* 🔧 chore: Add missing optional `scraperTimeout` to webSearchSchema
* chore: Add missing optional `scraperTimeout` to web search authentication result
* chore: linting
* feat: Integrate attachment handling and citation processing in message components
- Added `useAttachments` hook to manage message attachments and search results.
- Updated `MessageParts`, `ContentParts`, and `ContentRender` components to utilize the new hook for improved attachment handling.
- Enhanced `useCopyToClipboard` to format citations correctly, including support for composite citations and deduplication (a short numbering sketch follows the change summary below).
- Introduced utility functions for citation processing and cleanup.
- Added tests for the new `useCopyToClipboard` functionality to ensure proper citation formatting and handling.
* feat: Add configuration for LibreChat Code Interpreter API and Web Search variables
* fix: Update searchResults type to use SearchResultData for better type safety
* feat: Add web search configuration validation and logging
- Introduced `checkWebSearchConfig` function to validate web search configuration values, ensuring they are environment variable references (a usage sketch follows the change summary below).
- Added logging for proper configuration and warnings for incorrect values.
- Created unit tests for `checkWebSearchConfig` to cover various scenarios, including valid and invalid configurations.
* docs: Update README to include Web Search feature details
- Added a section for the Web Search feature, highlighting its capabilities to search the internet and enhance AI context.
- Included links for further information on the Web Search functionality.
* ci: Add mock for checkWebSearchConfig in AppService tests
* chore: linting
* feat: Enhance Shared Messages with Web Search UI by adding searchResults prop to SearchContent and MinimalHoverButtons components
* chore: linting
* refactor: remove Meilisearch index sync from importConversations function
* feat: update safeSearch implementation to use SafeSearchTypes enum
* refactor: remove commented-out code in loadTools function
* fix: ensure responseMessageId handles latestMessage ID correctly
* feat: enhance Vite configuration for improved chunking and caching
- Added additional globIgnores for map files in Workbox configuration.
- Implemented high-impact chunking for various large libraries to optimize performance.
- Increased chunkSizeWarningLimit from 1200 to 1500 for better handling of larger chunks.
* refactor: move health check hook to Root, fix bad setState for Temporary state
- Enhanced the `useHealthCheck` hook to initiate health checks only when the user is authenticated (a consumption sketch follows the change summary below).
- Added logic for managing health check intervals and handling window focus events.
- Introduced a new test suite for `useHealthCheck` to cover various scenarios including authentication state changes and error handling.
- Removed the health check invocation from `ChatRoute` and added it to `Root` for global health monitoring.
* fix: update font alias in Vite configuration for correct path resolution
This commit is contained in:
parent cede5d120c
commit b2f44fc90f
34 changed files with 1709 additions and 140 deletions
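As a reading aid for the clipboard changes, here is a toy sketch (not the actual hook) of the dedup-by-URL numbering that the new `useCopyToClipboard` tests below assert; the function name and shape are illustrative only:

// Toy illustration of the numbering scheme asserted in useCopyToClipboard.spec.ts:
// each distinct citation URL gets the next [n]; repeated URLs reuse their number.
function numberCitations(urls) {
  const seen = new Map();
  return urls.map((url) => {
    if (!seen.has(url)) {
      seen.set(url, seen.size + 1);
    }
    return `[${seen.get(url)}]`;
  });
}

// numberCitations([
//   'https://example.com/article', // from organic results
//   'https://example.com/article', // same URL via topStories
// ]) returns ['[1]', '[1]']; the copied text then lists the URL once under "Citations:".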
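For the web search validation, a minimal usage sketch, assuming a `webSearch` object loaded from `librechat.yaml`; the key names mirror the AppService test fixture in the diff, and the literal key value is a made-up example:

// Values in librechat.yaml should be environment variable references, not literal secrets.
const { checkWebSearchConfig } = require('./start/checks');

const webSearch = {
  serperApiKey: '${SERPER_API_KEY}', // reference: reported via logger.debug
  firecrawlApiUrl: '${FIRECRAWL_API_URL}', // a reference is fine even if the variable is unset
  jinaApiKey: 'jina-actual-key-example', // literal (hypothetical) value: triggers logger.warn
};

checkWebSearchConfig(webSearch);
// The warning points users to "${YOUR_ENV_VAR_NAME}" style references in librechat.yaml
// and to https://www.librechat.ai/docs/configuration/librechat_yaml/web_search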
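And for the health check move, a sketch of how the reworked hook is meant to be consumed once from `Root`; the import path and auth wiring are assumptions based on the commit notes and the new hook signature shown in the diff:

// Assumed wiring (not the actual Root component code): one global health check,
// gated on auth state; the hook is a no-op until isAuthenticated is true.
const { useHealthCheck } = require('~/data-provider'); // export path assumed

function useRootHealthMonitoring(isAuthenticated) {
  // initial check ~500ms after mount, then every Time.TEN_MINUTES,
  // plus a re-check on window focus when the last result is stale
  useHealthCheck(isAuthenticated);
}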

.env.example  (30 lines changed)

@@ -590,3 +590,33 @@ HELP_AND_FAQ_URL=https://librechat.ai
# OpenWeather #
#=====================================================#
OPENWEATHER_API_KEY=

#====================================#
# LibreChat Code Interpreter API #
#====================================#

# https://code.librechat.ai
# LIBRECHAT_CODE_API_KEY=your-key

#======================#
# Web Search #
#======================#

# Note: All of the following variable names can be customized.
# Omit values to allow user to provide them.

# For more information on configuration values, see:
# https://librechat.ai/docs/features/web_search

# Search Provider (Required)
# SERPER_API_KEY=your_serper_api_key

# Scraper (Required)
# FIRECRAWL_API_KEY=your_firecrawl_api_key
# Optional: Custom Firecrawl API URL
# FIRECRAWL_API_URL=your_firecrawl_api_url

# Reranker (Required)
# JINA_API_KEY=your_jina_api_key
# or
# COHERE_API_KEY=your_cohere_api_key

@@ -71,6 +71,11 @@
- [Model Context Protocol (MCP) Support](https://modelcontextprotocol.io/clients#librechat) for Tools
- Use LibreChat Agents and OpenAI Assistants with Files, Code Interpreter, Tools, and API Actions

- 🔍 **Web Search**:
  - Search the internet and retrieve relevant information to enhance your AI context
  - Combines search providers, content scrapers, and result rerankers for optimal results
  - **[Learn More →](https://www.librechat.ai/docs/features/web_search)**

- 🪄 **Generative UI with Code Artifacts**:
  - [Code Artifacts](https://youtu.be/GfTj7O4gmd0?si=WJbdnemZpJzBrJo3) allow creation of React, HTML, and Mermaid diagrams directly in chat

@@ -277,10 +277,6 @@ const loadTools = async ({
      });
      const { onSearchResults, onGetHighlights } = options?.[Tools.web_search] ?? {};
      requestedTools[tool] = async () => {
        // const { files, toolContext } = await primeSearchFiles(options);
        // if (toolContext) {
        //   toolContextMap[tool] = toolContext;
        // }
        toolContextMap[tool] = `# \`${tool}\`:
Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
1. **Execute immediately without preface** when using \`${tool}\`.

@@ -74,7 +74,7 @@ router.post('/gen_title', async (req, res) => {
      res.status(200).json({ title });
    } else {
      res.status(404).json({
        message: 'Title not found or method not implemented for the conversation\'s endpoint',
        message: "Title not found or method not implemented for the conversation's endpoint",
      });
    }
  });

@@ -25,6 +25,7 @@ jest.mock('./start/checks', () => ({
  checkHealth: jest.fn(),
  checkConfig: jest.fn(),
  checkAzureVariables: jest.fn(),
  checkWebSearchConfig: jest.fn(),
}));

const AppService = require('./AppService');

@@ -6,7 +6,13 @@ const {
  getConfigDefaults,
  loadWebSearchConfig,
} = require('librechat-data-provider');
const { checkVariables, checkHealth, checkConfig, checkAzureVariables } = require('./start/checks');
const {
  checkHealth,
  checkConfig,
  checkVariables,
  checkAzureVariables,
  checkWebSearchConfig,
} = require('./start/checks');
const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
const { initializeAzureBlobService } = require('./Files/Azure/initialize');
const { initializeFirebase } = require('./Files/Firebase/initialize');

@@ -37,6 +43,7 @@ const AppService = async (app) => {

  const ocr = loadOCRConfig(config.ocr);
  const webSearch = loadWebSearchConfig(config.webSearch);
  checkWebSearchConfig(webSearch);
  const filteredTools = config.filteredTools;
  const includedTools = config.includedTools;
  const fileStrategy = config.fileStrategy ?? configDefaults.fileStrategy;

@@ -146,7 +146,7 @@ describe('AppService', () => {
        firecrawlApiKey: '${FIRECRAWL_API_KEY}',
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        safeSearch: true,
        safeSearch: 1,
        serperApiKey: '${SERPER_API_KEY}',
      },
    });

@@ -1,7 +1,9 @@
const {
  Constants,
  webSearchKeys,
  deprecatedAzureVariables,
  conflictingAzureVariables,
  extractVariableName,
} = require('librechat-data-provider');
const { isEnabled, checkEmailConfig } = require('~/server/utils');
const { logger } = require('~/config');

@@ -141,4 +143,56 @@ function checkPasswordReset() {
  }
}

module.exports = { checkVariables, checkHealth, checkConfig, checkAzureVariables };
/**
 * Checks web search configuration values to ensure they are environment variable references.
 * Warns if actual API keys or URLs are used instead of environment variable references.
 * Logs debug information for properly configured environment variable references.
 * @param {Object} webSearchConfig - The loaded web search configuration object.
 */
function checkWebSearchConfig(webSearchConfig) {
  if (!webSearchConfig) {
    return;
  }

  webSearchKeys.forEach((key) => {
    const value = webSearchConfig[key];

    if (typeof value === 'string') {
      const varName = extractVariableName(value);

      if (varName) {
        // This is a proper environment variable reference
        const actualValue = process.env[varName];
        if (actualValue) {
          logger.debug(`Web search ${key}: Using environment variable ${varName} with value set`);
        } else {
          logger.debug(
            `Web search ${key}: Using environment variable ${varName} (not set in environment, user provided value)`,
          );
        }
      } else {
        // This is not an environment variable reference - warn user
        logger.warn(
          `❗ Web search configuration error: ${key} contains an actual value instead of an environment variable reference.

Current value: "${value.substring(0, 10)}..."

This is incorrect! You should use environment variable references in your librechat.yaml file, such as:
${key}: "\${YOUR_ENV_VAR_NAME}"

Then set the actual API key in your .env file or environment variables.

More info: https://www.librechat.ai/docs/configuration/librechat_yaml/web_search`,
        );
      }
    }
  });
}

module.exports = {
  checkHealth,
  checkConfig,
  checkVariables,
  checkAzureVariables,
  checkWebSearchConfig,
};

api/server/services/start/checks.spec.js  (new file, 203 lines)

@@ -0,0 +1,203 @@
// Mock librechat-data-provider
jest.mock('librechat-data-provider', () => ({
  ...jest.requireActual('librechat-data-provider'),
  extractVariableName: jest.fn(),
}));

// Mock the config logger
jest.mock('~/config', () => ({
  logger: {
    debug: jest.fn(),
    warn: jest.fn(),
  },
}));

const { checkWebSearchConfig } = require('./checks');
const { logger } = require('~/config');
const { extractVariableName } = require('librechat-data-provider');

describe('checkWebSearchConfig', () => {
  let originalEnv;

  beforeEach(() => {
    // Clear all mocks
    jest.clearAllMocks();

    // Store original environment
    originalEnv = process.env;

    // Reset process.env
    process.env = { ...originalEnv };
  });

  afterEach(() => {
    // Restore original environment
    process.env = originalEnv;
  });

  describe('when webSearchConfig is undefined or null', () => {
    it('should return early without logging when config is undefined', () => {
      checkWebSearchConfig(undefined);

      expect(logger.debug).not.toHaveBeenCalled();
      expect(logger.warn).not.toHaveBeenCalled();
    });

    it('should return early without logging when config is null', () => {
      checkWebSearchConfig(null);

      expect(logger.debug).not.toHaveBeenCalled();
      expect(logger.warn).not.toHaveBeenCalled();
    });
  });

  describe('when config values are proper environment variable references', () => {
    it('should log debug message for each valid environment variable with value set', () => {
      const config = {
        serperApiKey: '${SERPER_API_KEY}',
        jinaApiKey: '${JINA_API_KEY}',
      };

      extractVariableName.mockReturnValueOnce('SERPER_API_KEY').mockReturnValueOnce('JINA_API_KEY');

      process.env.SERPER_API_KEY = 'test-serper-key';
      process.env.JINA_API_KEY = 'test-jina-key';

      checkWebSearchConfig(config);

      expect(extractVariableName).toHaveBeenCalledWith('${SERPER_API_KEY}');
      expect(extractVariableName).toHaveBeenCalledWith('${JINA_API_KEY}');
      expect(logger.debug).toHaveBeenCalledWith(
        'Web search serperApiKey: Using environment variable SERPER_API_KEY with value set',
      );
      expect(logger.debug).toHaveBeenCalledWith(
        'Web search jinaApiKey: Using environment variable JINA_API_KEY with value set',
      );
      expect(logger.warn).not.toHaveBeenCalled();
    });

    it('should log debug message for environment variables not set in environment', () => {
      const config = {
        cohereApiKey: '${COHERE_API_KEY}',
      };

      extractVariableName.mockReturnValue('COHERE_API_KEY');

      delete process.env.COHERE_API_KEY;

      checkWebSearchConfig(config);

      expect(logger.debug).toHaveBeenCalledWith(
        'Web search cohereApiKey: Using environment variable COHERE_API_KEY (not set in environment, user provided value)',
      );
      expect(logger.warn).not.toHaveBeenCalled();
    });
  });

  describe('when config values are actual values instead of environment variable references', () => {
    it('should warn when serperApiKey contains actual API key', () => {
      const config = {
        serperApiKey: 'sk-1234567890abcdef',
      };

      extractVariableName.mockReturnValue(null);

      checkWebSearchConfig(config);

      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining(
          '❗ Web search configuration error: serperApiKey contains an actual value',
        ),
      );
      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining('Current value: "sk-1234567..."'),
      );
      expect(logger.debug).not.toHaveBeenCalled();
    });

    it('should warn when firecrawlApiUrl contains actual URL', () => {
      const config = {
        firecrawlApiUrl: 'https://api.firecrawl.dev',
      };

      extractVariableName.mockReturnValue(null);

      checkWebSearchConfig(config);

      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining(
          '❗ Web search configuration error: firecrawlApiUrl contains an actual value',
        ),
      );
      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining('Current value: "https://ap..."'),
      );
    });

    it('should include documentation link in warning message', () => {
      const config = {
        firecrawlApiKey: 'fc-actual-key',
      };

      extractVariableName.mockReturnValue(null);

      checkWebSearchConfig(config);

      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining(
          'More info: https://www.librechat.ai/docs/configuration/librechat_yaml/web_search',
        ),
      );
    });
  });

  describe('when config contains mixed value types', () => {
    it('should only process string values and ignore non-string values', () => {
      const config = {
        serperApiKey: '${SERPER_API_KEY}',
        safeSearch: 1,
        scraperTimeout: 7500,
        jinaApiKey: 'actual-key',
      };

      extractVariableName.mockReturnValueOnce('SERPER_API_KEY').mockReturnValueOnce(null);

      process.env.SERPER_API_KEY = 'test-key';

      checkWebSearchConfig(config);

      expect(extractVariableName).toHaveBeenCalledTimes(2);
      expect(logger.debug).toHaveBeenCalledTimes(1);
      expect(logger.warn).toHaveBeenCalledTimes(1);
    });
  });

  describe('edge cases', () => {
    it('should handle config with no web search keys', () => {
      const config = {
        someOtherKey: 'value',
        anotherKey: '${SOME_VAR}',
      };

      checkWebSearchConfig(config);

      expect(extractVariableName).not.toHaveBeenCalled();
      expect(logger.debug).not.toHaveBeenCalled();
      expect(logger.warn).not.toHaveBeenCalled();
    });

    it('should truncate long values in warning messages', () => {
      const config = {
        serperApiKey: 'this-is-a-very-long-api-key-that-should-be-truncated-in-the-warning-message',
      };

      extractVariableName.mockReturnValue(null);

      checkWebSearchConfig(config);

      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining('Current value: "this-is-a-..."'),
      );
    });
  });
});

@@ -1,6 +1,5 @@
const fs = require('fs').promises;
const { getImporter } = require('./importers');
const { indexSync } = require('~/lib/db');
const { logger } = require('~/config');

/**

@@ -15,8 +14,6 @@ const importConversations = async (job) => {
    const jsonData = JSON.parse(fileData);
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId);
    // Sync Meilisearch index
    await indexSync();
    logger.debug(`user: ${requestUserId} | Finished importing conversations`);
  } catch (error) {
    logger.error(`user: ${requestUserId} | Failed to import conversation: `, error);

@@ -1,13 +1,17 @@
import { memo, useMemo, useState } from 'react';
import { useRecoilValue, useRecoilState } from 'recoil';
import { useRecoilState } from 'recoil';
import { ContentTypes } from 'librechat-data-provider';
import type { TMessageContentParts, TAttachment, Agents } from 'librechat-data-provider';
import { useSearchResultsByTurn } from '~/hooks/Messages/useSearchResultsByTurn';
import type {
  TMessageContentParts,
  SearchResultData,
  TAttachment,
  Agents,
} from 'librechat-data-provider';
import { ThinkingButton } from '~/components/Artifacts/Thinking';
import useLocalize from '~/hooks/useLocalize';
import { mapAttachments } from '~/utils/map';
import { MessageContext, SearchContext } from '~/Providers';
import Sources from '~/components/Web/Sources';
import useLocalize from '~/hooks/useLocalize';
import { mapAttachments } from '~/utils/map';
import { EditTextPart } from './Parts';
import store from '~/store';
import Part from './Part';

@@ -17,6 +21,7 @@ type ContentPartsProps = {
  messageId: string;
  conversationId?: string | null;
  attachments?: TAttachment[];
  searchResults?: { [key: string]: SearchResultData };
  isCreatedByUser: boolean;
  isLast: boolean;
  isSubmitting: boolean;

@@ -35,6 +40,7 @@ const ContentParts = memo(
    messageId,
    conversationId,
    attachments,
    searchResults,
    isCreatedByUser,
    isLast,
    isSubmitting,

@@ -44,15 +50,9 @@ const ContentParts = memo(
    setSiblingIdx,
  }: ContentPartsProps) => {
    const localize = useLocalize();
    const messageAttachmentsMap = useRecoilValue(store.messageAttachmentsMap);
    const [showThinking, setShowThinking] = useRecoilState<boolean>(store.showThinking);
    const [isExpanded, setIsExpanded] = useState(showThinking);
    const messageAttachments = useMemo(
      () => attachments ?? messageAttachmentsMap[messageId] ?? [],
      [attachments, messageAttachmentsMap, messageId],
    );
    const searchResults = useSearchResultsByTurn(messageAttachments);
    const attachmentMap = useMemo(() => mapAttachments(messageAttachments), [messageAttachments]);
    const attachmentMap = useMemo(() => mapAttachments(attachments ?? []), [attachments]);

    const hasReasoningParts = useMemo(() => {
      const hasThinkPart = content?.some((part) => part?.type === ContentTypes.THINK) ?? false;

@@ -1,26 +1,40 @@
import { Suspense, useMemo } from 'react';
import { useRecoilValue } from 'recoil';
import { ContentTypes } from 'librechat-data-provider';
import type { Agents, TMessage, TMessageContentParts } from 'librechat-data-provider';
import type {
  Agents,
  TMessage,
  TAttachment,
  SearchResultData,
  TMessageContentParts,
} from 'librechat-data-provider';
import { UnfinishedMessage } from './MessageContent';
import { DelayedRender } from '~/components/ui';
import MarkdownLite from './MarkdownLite';
import Sources from '~/components/Web/Sources';
import { cn, mapAttachments } from '~/utils';
import { SearchContext } from '~/Providers';
import MarkdownLite from './MarkdownLite';
import store from '~/store';
import Part from './Part';

const SearchContent = ({ message }: { message: TMessage }) => {
const SearchContent = ({
  message,
  attachments,
  searchResults,
}: {
  message: TMessage;
  attachments?: TAttachment[];
  searchResults?: { [key: string]: SearchResultData };
}) => {
  const enableUserMsgMarkdown = useRecoilValue(store.enableUserMsgMarkdown);
  const { messageId } = message;
  const messageAttachmentsMap = useRecoilValue(store.messageAttachmentsMap);
  const attachmentMap = useMemo(
    () => mapAttachments(message?.attachments ?? messageAttachmentsMap[messageId] ?? []),
    [message?.attachments, messageAttachmentsMap, messageId],
  );

  const attachmentMap = useMemo(() => mapAttachments(attachments ?? []), [attachments]);

  if (Array.isArray(message.content) && message.content.length > 0) {
    return (
      <>
      <SearchContext.Provider value={{ searchResults }}>
        <Sources />
        {message.content
          .filter((part: TMessageContentParts | undefined) => part)
          .map((part: TMessageContentParts | undefined, idx: number) => {

@@ -49,7 +63,7 @@ const SearchContent = ({ message }: { message: TMessage }) => {
          </DelayedRender>
        </Suspense>
      )}
      </>
      </SearchContext.Provider>
    );
  }


@@ -2,8 +2,8 @@ import React, { useMemo } from 'react';
import { useRecoilValue } from 'recoil';
import type { TMessageContentParts } from 'librechat-data-provider';
import type { TMessageProps, TMessageIcon } from '~/common';
import { useMessageHelpers, useLocalize, useAttachments } from '~/hooks';
import MessageIcon from '~/components/Chat/Messages/MessageIcon';
import { useMessageHelpers, useLocalize } from '~/hooks';
import ContentParts from './Content/ContentParts';
import SiblingSwitch from './SiblingSwitch';

@@ -17,7 +17,10 @@ export default function Message(props: TMessageProps) {
  const localize = useLocalize();
  const { message, siblingIdx, siblingCount, setSiblingIdx, currentEditId, setCurrentEditId } =
    props;

  const { attachments, searchResults } = useAttachments({
    messageId: message?.messageId,
    attachments: message?.attachments,
  });
  const {
    edit,
    index,

@@ -91,7 +94,7 @@ export default function Message(props: TMessageProps) {
    >
      <div className="m-auto justify-center p-4 py-2 md:gap-6">
        <div
          id={messageId}
          id={messageId ?? ''}
          aria-label={`message-${message.depth}-${messageId}`}
          className={cn(baseClasses.common, baseClasses.chat, 'message-render')}
        >

@@ -116,10 +119,11 @@ export default function Message(props: TMessageProps) {
            isLast={isLast}
            enterEdit={enterEdit}
            siblingIdx={siblingIdx}
            messageId={message.messageId}
            attachments={attachments}
            isSubmitting={isSubmitting}
            searchResults={searchResults}
            messageId={message.messageId}
            setSiblingIdx={setSiblingIdx}
            attachments={message.attachments}
            isCreatedByUser={message.isCreatedByUser}
            conversationId={conversation?.conversationId}
            content={message.content as Array<TMessageContentParts | undefined>}

@@ -1,16 +1,21 @@
import { useState } from 'react';
import type { TMessage } from 'librechat-data-provider';
import type { TMessage, TAttachment, SearchResultData } from 'librechat-data-provider';
import { useLocalize, useCopyToClipboard } from '~/hooks';
import { Clipboard, CheckMark } from '~/components/svg';

type THoverButtons = {
  message: TMessage;
  searchResults?: { [key: string]: SearchResultData };
};

export default function MinimalHoverButtons({ message }: THoverButtons) {
export default function MinimalHoverButtons({ message, searchResults }: THoverButtons) {
  const localize = useLocalize();
  const [isCopied, setIsCopied] = useState(false);
  const copyToClipboard = useCopyToClipboard({ text: message.text, content: message.content });
  const copyToClipboard = useCopyToClipboard({
    text: message.text,
    content: message.content,
    searchResults,
  });

  return (
    <div className="visible mt-0 flex justify-center gap-1 self-end text-gray-400 lg:justify-start">

@@ -7,8 +7,8 @@ import PlaceholderRow from '~/components/Chat/Messages/ui/PlaceholderRow';
import SiblingSwitch from '~/components/Chat/Messages/SiblingSwitch';
import HoverButtons from '~/components/Chat/Messages/HoverButtons';
import MessageIcon from '~/components/Chat/Messages/MessageIcon';
import { useAttachments, useMessageActions } from '~/hooks';
import SubRow from '~/components/Chat/Messages/SubRow';
import { useMessageActions } from '~/hooks';
import { cn, logger } from '~/utils';
import store from '~/store';

@@ -34,6 +34,10 @@ const ContentRender = memo(
    setCurrentEditId,
    isSubmittingFamily = false,
  }: ContentRenderProps) => {
    const { attachments, searchResults } = useAttachments({
      messageId: msg?.messageId,
      attachments: msg?.attachments,
    });
    const {
      edit,
      index,

@@ -50,6 +54,7 @@ const ContentRender = memo(
      regenerateMessage,
    } = useMessageActions({
      message: msg,
      searchResults,
      currentEditId,
      isMultiMessage,
      setCurrentEditId,

@@ -164,9 +169,10 @@ const ContentRender = memo(
            enterEdit={enterEdit}
            siblingIdx={siblingIdx}
            messageId={msg.messageId}
            attachments={attachments}
            isSubmitting={isSubmitting}
            searchResults={searchResults}
            setSiblingIdx={setSiblingIdx}
            attachments={msg.attachments}
            isCreatedByUser={msg.isCreatedByUser}
            conversationId={conversation?.conversationId}
            content={msg.content as Array<TMessageContentParts | undefined>}

@@ -7,7 +7,8 @@ import SiblingSwitch from '~/components/Chat/Messages/SiblingSwitch';
import { Plugin } from '~/components/Messages/Content';
import SubRow from '~/components/Chat/Messages/SubRow';
import { MessageContext } from '~/Providers';
// eslint-disable-next-line import/no-cycle
import { useAttachments } from '~/hooks';

import MultiMessage from './MultiMessage';
import { cn } from '~/utils';
import store from '~/store';

@@ -25,6 +26,11 @@ export default function Message(props: TMessageProps) {
    setCurrentEditId,
  } = props;

  const { attachments, searchResults } = useAttachments({
    messageId: message?.messageId,
    attachments: message?.attachments,
  });

  if (!message) {
    return null;
  }

@@ -48,8 +54,8 @@ export default function Message(props: TMessageProps) {
  return (
    <>
      <div className="text-token-text-primary w-full border-0 bg-transparent dark:border-0 dark:bg-transparent">
        <div className="m-auto justify-center p-4 py-2 md:gap-6 ">
          <div className="final-completion group mx-auto flex flex-1 gap-3 md:max-w-3xl md:px-5 lg:max-w-[40rem] lg:px-1 xl:max-w-[48rem] xl:px-5">
        <div className="m-auto justify-center p-4 py-2 md:gap-6">
          <div className="final-completion group mx-auto flex flex-1 gap-3 md:max-w-[47rem] md:px-5 lg:px-1 xl:max-w-[55rem] xl:px-5">
            <div className="relative flex flex-shrink-0 flex-col items-end">
              <div>
                <div className="pt-0.5">

@@ -68,13 +74,18 @@ export default function Message(props: TMessageProps) {
              <MessageContext.Provider
                value={{
                  messageId,
                  isExpanded: false,
                  conversationId: conversation?.conversationId,
                }}
              >
                {/* Legacy Plugins */}
                {message.plugin && <Plugin plugin={message.plugin} />}
                {message.content ? (
                  <SearchContent message={message} />
                  <SearchContent
                    message={message}
                    attachments={attachments}
                    searchResults={searchResults}
                  />
                ) : (
                  <MessageContent
                    edit={false}

@@ -100,7 +111,7 @@ export default function Message(props: TMessageProps) {
                  siblingCount={siblingCount}
                  setSiblingIdx={setSiblingIdx}
                />
                <MinimalHoverButtons message={message} />
                <MinimalHoverButtons message={message} searchResults={searchResults} />
              </SubRow>
            </div>
          </div>

@@ -15,7 +15,7 @@ function SharedView() {
  const { shareId } = useParams();
  const { data, isLoading } = useGetSharedMessages(shareId ?? '');
  const dataTree = data && buildTree({ messages: data.messages });
  const messagesTree = dataTree?.length === 0 ? null : dataTree ?? null;
  const messagesTree = dataTree?.length === 0 ? null : (dataTree ?? null);

  // configure document title
  let docTitle = '';

@@ -37,7 +37,7 @@ function SharedView() {
  } else if (data && messagesTree && messagesTree.length !== 0) {
    content = (
      <>
        <div className="final-completion group mx-auto flex min-w-[40rem] flex-col gap-3 pb-6 pt-4 md:max-w-3xl md:px-5 lg:max-w-[40rem] lg:px-1 xl:max-w-[48rem] xl:px-5">
        <div className="final-completion group mx-auto flex min-w-[40rem] flex-col gap-3 pb-6 pt-4 md:max-w-[47rem] md:px-5 lg:px-1 xl:max-w-[55rem] xl:px-5">
          <h1 className="text-4xl font-bold">{data.title}</h1>
          <div className="border-b border-border-medium pb-6 text-base text-text-secondary">
            {new Date(data.createdAt).toLocaleDateString('en-US', {

@@ -53,7 +53,7 @@ function SharedView() {
    );
  } else {
    content = (
      <div className="flex h-screen items-center justify-center ">
      <div className="flex h-screen items-center justify-center">
        {localize('com_ui_shared_link_not_found')}
      </div>
    );

@@ -1,11 +1,7 @@
import { visit } from 'unist-util-visit';
import type { Node } from 'unist';
import type { Citation, CitationNode } from './types';

const SPAN_REGEX = /(\\ue203.*?\\ue204)/g;
const COMPOSITE_REGEX = /(\\ue200.*?\\ue201)/g;
const STANDALONE_PATTERN = /\\ue202turn(\d+)(search|image|news|video|ref)(\d+)/g;
const CLEANUP_REGEX = /\\ue200|\\ue201|\\ue202|\\ue203|\\ue204|\\ue206/g;
import { SPAN_REGEX, STANDALONE_PATTERN, CLEANUP_REGEX, COMPOSITE_REGEX } from '~/utils/citations';

/**
 * Checks if a standalone marker is truly standalone (not inside a composite block)

client/src/data-provider/__tests__/connection.test.ts  (new file, 316 lines)

@@ -0,0 +1,316 @@
import { renderHook, act } from '@testing-library/react';
import { useQueryClient } from '@tanstack/react-query';
import { useHealthCheck } from '../connection';
import { QueryKeys, Time, dataService } from 'librechat-data-provider';

// Mock dependencies
jest.mock('@tanstack/react-query');
jest.mock('librechat-data-provider', () => ({
  QueryKeys: { health: 'health' },
  Time: { TEN_MINUTES: 600000, FIVE_MINUTES: 300000 },
  dataService: { healthCheck: jest.fn() },
}));

jest.mock('~/utils', () => ({
  logger: { log: jest.fn() },
}));

// Mock timers
jest.useFakeTimers();

const mockQueryClient = {
  fetchQuery: jest.fn(),
  getQueryState: jest.fn(),
  getQueryData: jest.fn(),
  invalidateQueries: jest.fn(),
} as any;

const mockUseQueryClient = useQueryClient as jest.MockedFunction<typeof useQueryClient>;

describe('useHealthCheck', () => {
  let addEventListenerSpy: jest.SpyInstance;
  let removeEventListenerSpy: jest.SpyInstance;

  beforeEach(() => {
    jest.clearAllMocks();
    jest.clearAllTimers();
    mockUseQueryClient.mockReturnValue(mockQueryClient);

    addEventListenerSpy = jest.spyOn(window, 'addEventListener');
    removeEventListenerSpy = jest.spyOn(window, 'removeEventListener');

    mockQueryClient.fetchQuery.mockResolvedValue({});
    mockQueryClient.getQueryState.mockReturnValue(null);
  });

  afterEach(() => {
    addEventListenerSpy.mockRestore();
    removeEventListenerSpy.mockRestore();
  });

  describe('when not authenticated', () => {
    it('should not start health check', () => {
      renderHook(() => useHealthCheck(false));

      // Fast-forward past the delay
      act(() => {
        jest.advanceTimersByTime(1000);
      });

      expect(mockQueryClient.fetchQuery).not.toHaveBeenCalled();
      expect(addEventListenerSpy).not.toHaveBeenCalled();
    });
  });

  describe('when authenticated', () => {
    it('should start health check after delay', async () => {
      renderHook(() => useHealthCheck(true));

      // Should not run immediately
      expect(mockQueryClient.fetchQuery).not.toHaveBeenCalled();

      // Should run after 500ms delay
      await act(async () => {
        jest.advanceTimersByTime(500);
      });

      expect(mockQueryClient.fetchQuery).toHaveBeenCalledWith(
        [QueryKeys.health],
        expect.any(Function),
        {
          retry: false,
          cacheTime: 0,
          staleTime: 0,
        },
      );
    });

    it('should set up 10-minute interval', async () => {
      renderHook(() => useHealthCheck(true));

      await act(async () => {
        jest.advanceTimersByTime(500); // Initial delay
      });

      // Clear the initial call
      mockQueryClient.fetchQuery.mockClear();

      // Advance by 10 minutes
      await act(async () => {
        jest.advanceTimersByTime(Time.TEN_MINUTES);
      });

      expect(mockQueryClient.fetchQuery).toHaveBeenCalledTimes(1);
    });

    it('should run health check continuously every 10 minutes', async () => {
      renderHook(() => useHealthCheck(true));

      await act(async () => {
        jest.advanceTimersByTime(500); // Initial delay
      });

      // Clear the initial call
      mockQueryClient.fetchQuery.mockClear();

      // Test multiple intervals to ensure it keeps running
      for (let i = 1; i <= 5; i++) {
        await act(async () => {
          jest.advanceTimersByTime(Time.TEN_MINUTES);
        });

        expect(mockQueryClient.fetchQuery).toHaveBeenCalledTimes(i);
      }

      // Verify it's been called 5 times total (once per interval)
      expect(mockQueryClient.fetchQuery).toHaveBeenCalledTimes(5);

      // Test that it continues after longer periods
      await act(async () => {
        jest.advanceTimersByTime(Time.TEN_MINUTES * 3); // Advance 30 more minutes
      });

      // Should have been called 3 more times (total of 8)
      expect(mockQueryClient.fetchQuery).toHaveBeenCalledTimes(8);
    });

    it('should add window focus event listener', async () => {
      renderHook(() => useHealthCheck(true));

      await act(async () => {
        jest.advanceTimersByTime(500);
      });

      expect(addEventListenerSpy).toHaveBeenCalledWith('focus', expect.any(Function));
    });

    it('should handle window focus correctly when no previous check', async () => {
      renderHook(() => useHealthCheck(true));

      await act(async () => {
        jest.advanceTimersByTime(500);
      });

      // Get the focus handler
      const focusHandler = addEventListenerSpy.mock.calls[0][1];

      // Mock no query state (no previous check)
      mockQueryClient.getQueryState.mockReturnValue(null);
      mockQueryClient.fetchQuery.mockClear();

      // Trigger focus event
      await act(async () => {
        focusHandler();
      });

      expect(mockQueryClient.fetchQuery).toHaveBeenCalledTimes(1);
    });

    it('should handle window focus correctly when check is recent', async () => {
      renderHook(() => useHealthCheck(true));

      await act(async () => {
        jest.advanceTimersByTime(500);
      });

      // Get the focus handler
      const focusHandler = addEventListenerSpy.mock.calls[0][1];

      // Mock recent query state (within 10 minutes)
      mockQueryClient.getQueryState.mockReturnValue({
        dataUpdatedAt: Date.now() - 300000, // 5 minutes ago
      });
      mockQueryClient.fetchQuery.mockClear();

      // Trigger focus event
      await act(async () => {
        focusHandler();
      });

      expect(mockQueryClient.fetchQuery).not.toHaveBeenCalled();
    });

    it('should handle window focus correctly when check is old', async () => {
      renderHook(() => useHealthCheck(true));

      await act(async () => {
        jest.advanceTimersByTime(500);
      });

      // Get the focus handler
      const focusHandler = addEventListenerSpy.mock.calls[0][1];

      // Mock old query state (older than 10 minutes)
      mockQueryClient.getQueryState.mockReturnValue({
        dataUpdatedAt: Date.now() - 700000, // 11+ minutes ago
      });
      mockQueryClient.fetchQuery.mockClear();

      // Trigger focus event
      await act(async () => {
        focusHandler();
      });

      expect(mockQueryClient.fetchQuery).toHaveBeenCalledTimes(1);
    });

    it('should prevent multiple initializations', async () => {
      const { rerender } = renderHook(({ auth }) => useHealthCheck(auth), {
        initialProps: { auth: true },
      });

      await act(async () => {
        jest.advanceTimersByTime(500);
      });

      const initialCallCount = addEventListenerSpy.mock.calls.length;

      // Re-render with same auth state
      rerender({ auth: true });

      await act(async () => {
        jest.advanceTimersByTime(500);
      });

      // Should not add more event listeners
      expect(addEventListenerSpy).toHaveBeenCalledTimes(initialCallCount);
    });

    it('should handle API errors gracefully', async () => {
      const consoleSpy = jest.spyOn(console, 'error').mockImplementation();
      mockQueryClient.fetchQuery.mockRejectedValue(new Error('API Error'));

      renderHook(() => useHealthCheck(true));

      await act(async () => {
        jest.advanceTimersByTime(500);
      });

      expect(consoleSpy).toHaveBeenCalledWith('Health check failed:', expect.any(Error));
      consoleSpy.mockRestore();
    });
  });

  describe('cleanup', () => {
    it('should clear intervals on unmount', async () => {
      const { unmount } = renderHook(() => useHealthCheck(true));

      await act(async () => {
        jest.advanceTimersByTime(500);
      });

      const clearIntervalSpy = jest.spyOn(global, 'clearInterval');

      unmount();

      expect(clearIntervalSpy).toHaveBeenCalled();
      clearIntervalSpy.mockRestore();
    });

    it('should remove event listeners on unmount', async () => {
      const { unmount } = renderHook(() => useHealthCheck(true));

      await act(async () => {
        jest.advanceTimersByTime(500);
      });

      unmount();

      expect(removeEventListenerSpy).toHaveBeenCalledWith('focus', expect.any(Function));
    });

    it('should clear timeout on unmount before initialization', () => {
      const clearTimeoutSpy = jest.spyOn(global, 'clearTimeout');
      const { unmount } = renderHook(() => useHealthCheck(true));

      // Unmount before delay completes
      unmount();

      expect(clearTimeoutSpy).toHaveBeenCalled();
      clearTimeoutSpy.mockRestore();
    });
  });

  describe('authentication state changes', () => {
    it('should start health check when authentication becomes true', async () => {
      const { rerender } = renderHook(({ auth }) => useHealthCheck(auth), {
        initialProps: { auth: false },
      });

      // Should not start when false
      act(() => {
        jest.advanceTimersByTime(1000);
      });
      expect(mockQueryClient.fetchQuery).not.toHaveBeenCalled();

      // Should start when becomes true
      rerender({ auth: true });

      await act(async () => {
        jest.advanceTimersByTime(500);
      });

      expect(mockQueryClient.fetchQuery).toHaveBeenCalled();
    });
  });
});

@@ -1,31 +1,84 @@
import { useCallback, useRef } from 'react';
import { useQuery, useQueryClient } from '@tanstack/react-query';
import { useCallback, useRef, useEffect } from 'react';
import { useQueryClient } from '@tanstack/react-query';
import { QueryKeys, Time, dataService } from 'librechat-data-provider';
import { logger } from '~/utils';

export const useHealthCheck = () => {
  useQuery([QueryKeys.health], () => dataService.healthCheck(), {
    refetchInterval: Time.TEN_MINUTES,
    retry: false,
    onError: (error) => {
      console.error('Health check failed:', error);
    },
    cacheTime: 0,
    staleTime: 0,
    refetchOnWindowFocus: (query) => {
      if (!query.state.dataUpdatedAt) {
        return true;
export const useHealthCheck = (isAuthenticated = false) => {
  const queryClient = useQueryClient();
  const isInitialized = useRef(false);
  const intervalRef = useRef<NodeJS.Timeout | null>(null);
  const focusHandlerRef = useRef<(() => Promise<void>) | null>(null);

  useEffect(() => {
    // Only start health check if authenticated
    if (!isAuthenticated) {
      return;
    }

      const lastUpdated = new Date(query.state.dataUpdatedAt);
    // Prevent multiple initializations
    if (isInitialized.current) {
      return;
    }
    isInitialized.current = true;

    // Use a longer delay to ensure all rendering is complete
    const initTimer = setTimeout(() => {
      const performHealthCheck = async () => {
        try {
          await queryClient.fetchQuery([QueryKeys.health], () => dataService.healthCheck(), {
            retry: false,
            cacheTime: 0,
            staleTime: 0,
          });
        } catch (error) {
          console.error('Health check failed:', error);
        }
      };

      // Initial check
      performHealthCheck();

      // Set up interval for recurring checks
      intervalRef.current = setInterval(performHealthCheck, Time.TEN_MINUTES);

      // Set up window focus handler
      const handleWindowFocus = async () => {
        const queryState = queryClient.getQueryState([QueryKeys.health]);

        if (!queryState?.dataUpdatedAt) {
          await performHealthCheck();
          return;
        }

        const lastUpdated = new Date(queryState.dataUpdatedAt);
        const tenMinutesAgo = new Date(Date.now() - Time.TEN_MINUTES);

        logger.log(`Last health check: ${lastUpdated.toISOString()}`);
        logger.log(`Ten minutes ago: ${tenMinutesAgo.toISOString()}`);

        return lastUpdated < tenMinutesAgo;
      },
    });
        if (lastUpdated < tenMinutesAgo) {
          await performHealthCheck();
        }
      };

      // Store handler for cleanup
      focusHandlerRef.current = handleWindowFocus;
      window.addEventListener('focus', handleWindowFocus);
    }, 500);

    return () => {
      clearTimeout(initTimer);
      if (intervalRef.current) {
        clearInterval(intervalRef.current);
        intervalRef.current = null;
      }
      // Remove focus event listener if it was added
      if (focusHandlerRef.current) {
        window.removeEventListener('focus', focusHandlerRef.current);
        focusHandlerRef.current = null;
      }
    };
  }, [isAuthenticated, queryClient]);
};

export const useInteractionHealthCheck = () => {

@@ -245,7 +245,8 @@ export default function useChatFunctions({
    const generation = editedText ?? latestMessage?.text ?? '';
    const responseText = isEditOrContinue ? generation : '';

    const responseMessageId = editedMessageId ?? latestMessage?.messageId + '_' ?? null;
    const responseMessageId =
      editedMessageId ?? (latestMessage?.messageId ? latestMessage?.messageId + '_' : null) ?? null;
    const initialResponse: TMessage = {
      sender: responseSender,
      text: responseText,

@@ -1,5 +1,6 @@
export { default as useAvatar } from './useAvatar';
export { default as useProgress } from './useProgress';
export { default as useAttachments } from './useAttachments';
export { default as useSubmitMessage } from './useSubmitMessage';
export { default as useMessageActions } from './useMessageActions';
export { default as useMessageProcess } from './useMessageProcess';

client/src/hooks/Messages/useAttachments.ts  (new file, 26 lines)

@@ -0,0 +1,26 @@
import { useMemo } from 'react';
import { useRecoilValue } from 'recoil';
import type { TAttachment } from 'librechat-data-provider';
import { useSearchResultsByTurn } from './useSearchResultsByTurn';
import store from '~/store';

export default function useAttachments({
  messageId,
  attachments,
}: {
  messageId?: string;
  attachments?: TAttachment[];
}) {
  const messageAttachmentsMap = useRecoilValue(store.messageAttachmentsMap);
  const messageAttachments = useMemo(
    () => attachments ?? messageAttachmentsMap[messageId ?? ''] ?? [],
    [attachments, messageAttachmentsMap, messageId],
  );

  const searchResults = useSearchResultsByTurn(messageAttachments);

  return {
    attachments: messageAttachments,
    searchResults,
  };
}

client/src/hooks/Messages/useCopyToClipboard.spec.ts  (new file, 494 lines)

@@ -0,0 +1,494 @@
import { renderHook, act } from '@testing-library/react';
import copy from 'copy-to-clipboard';
import { ContentTypes } from 'librechat-data-provider';
import type {
  SearchResultData,
  ProcessedOrganic,
  TMessageContentParts,
} from 'librechat-data-provider';
import useCopyToClipboard from '~/hooks/Messages/useCopyToClipboard';

// Mock the copy-to-clipboard module
jest.mock('copy-to-clipboard');

describe('useCopyToClipboard', () => {
  const mockSetIsCopied = jest.fn();
  const mockCopy = copy as jest.MockedFunction<typeof copy>;

  beforeEach(() => {
    jest.clearAllMocks();
    jest.useFakeTimers();
  });

  afterEach(() => {
    jest.runOnlyPendingTimers();
    jest.useRealTimers();
  });

  describe('Basic functionality', () => {
    it('should copy plain text without citations', () => {
      const { result } = renderHook(() =>
        useCopyToClipboard({
          text: 'Simple text without citations',
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      expect(mockCopy).toHaveBeenCalledWith('Simple text without citations', {
        format: 'text/plain',
      });
      expect(mockSetIsCopied).toHaveBeenCalledWith(true);
    });

    it('should handle content array with text types', () => {
      const content = [
        { type: ContentTypes.TEXT, text: 'First line' },
        { type: ContentTypes.TEXT, text: 'Second line' },
      ];

      const { result } = renderHook(() =>
        useCopyToClipboard({
          content: content as TMessageContentParts[],
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      expect(mockCopy).toHaveBeenCalledWith('First line\nSecond line', {
        format: 'text/plain',
      });
    });

    it('should reset isCopied after timeout', () => {
      const { result } = renderHook(() =>
        useCopyToClipboard({
          text: 'Test text',
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      expect(mockSetIsCopied).toHaveBeenCalledWith(true);

      act(() => {
        jest.advanceTimersByTime(3000);
      });

      expect(mockSetIsCopied).toHaveBeenCalledWith(false);
    });
  });

  describe('Citation formatting', () => {
    const mockSearchResults: { [key: string]: SearchResultData } = {
      '0': {
        organic: [
          {
            link: 'https://example.com/search1',
            title: 'Search Result 1',
            snippet: 'This is a search result',
          },
        ],
        topStories: [
          {
            link: 'https://example.com/news1',
            title: 'News Story 1',
          },
          {
            link: 'https://example.com/news2',
            title: 'News Story 2',
          },
        ],
        images: [
          {
            link: 'https://example.com/image1',
            title: 'Image 1',
          },
        ],
        videos: [
          {
            link: 'https://example.com/video1',
            title: 'Video 1',
          },
        ],
      },
    };

    it('should format standalone search citations', () => {
      const text = 'This is a fact \\ue202turn0search0 from search.';

      const { result } = renderHook(() =>
        useCopyToClipboard({
          text,
          searchResults: mockSearchResults,
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      const expectedText = `This is a fact [1] from search.

Citations:
[1] https://example.com/search1
`;

      expect(mockCopy).toHaveBeenCalledWith(expectedText, { format: 'text/plain' });
    });

    it('should format news citations with correct mapping', () => {
      const text = 'Breaking news \\ue202turn0news0 and more news \\ue202turn0news1.';

      const { result } = renderHook(() =>
        useCopyToClipboard({
          text,
          searchResults: mockSearchResults,
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      const expectedText = `Breaking news [1] and more news [2].

Citations:
[1] https://example.com/news1
[2] https://example.com/news2
`;

      expect(mockCopy).toHaveBeenCalledWith(expectedText, { format: 'text/plain' });
    });

    it('should handle highlighted text with citations', () => {
      const text = '\\ue203This is highlighted text\\ue204 \\ue202turn0search0 with citation.';

      const { result } = renderHook(() =>
        useCopyToClipboard({
          text,
          searchResults: mockSearchResults,
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      const expectedText = `**This is highlighted text** [1] with citation.

Citations:
[1] https://example.com/search1
`;

      expect(mockCopy).toHaveBeenCalledWith(expectedText, { format: 'text/plain' });
    });

    it('should handle composite citations', () => {
      const text =
        'Multiple sources \\ue200\\ue202turn0search0\\ue202turn0news0\\ue202turn0news1\\ue201.';

      const { result } = renderHook(() =>
        useCopyToClipboard({
          text,
          searchResults: mockSearchResults,
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      const expectedText = `Multiple sources [1][2][3].

Citations:
[1] https://example.com/search1
[2] https://example.com/news1
[3] https://example.com/news2
`;

      expect(mockCopy).toHaveBeenCalledWith(expectedText, { format: 'text/plain' });
    });
  });

  describe('Citation deduplication', () => {
    it('should use same number for duplicate URLs', () => {
      const mockSearchResultsWithDupes: { [key: string]: SearchResultData } = {
        '0': {
          organic: [
            {
              link: 'https://example.com/article',
              title: 'Article from search',
            },
          ],
          topStories: [
            {
              link: 'https://example.com/article', // Same URL
              title: 'Article from news',
            },
          ],
        },
      };

      const text = 'First citation \\ue202turn0search0 and second \\ue202turn0news0.';

      const { result } = renderHook(() =>
        useCopyToClipboard({
          text,
          searchResults: mockSearchResultsWithDupes,
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      const expectedText = `First citation [1] and second [1].

Citations:
[1] https://example.com/article
`;

      expect(mockCopy).toHaveBeenCalledWith(expectedText, { format: 'text/plain' });
    });

    it('should handle multiple citations of the same source', () => {
      const mockSearchResults: { [key: string]: SearchResultData } = {
        '0': {
          organic: [
            {
              link: 'https://example.com/source1',
              title: 'Source 1',
            },
          ],
        },
      };

      const text =
        'First mention \\ue202turn0search0. Second mention \\ue202turn0search0. Third \\ue202turn0search0.';

      const { result } = renderHook(() =>
        useCopyToClipboard({
          text,
          searchResults: mockSearchResults,
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      const expectedText = `First mention [1]. Second mention [1]. Third [1].

Citations:
[1] https://example.com/source1
`;

      expect(mockCopy).toHaveBeenCalledWith(expectedText, { format: 'text/plain' });
    });
  });

  describe('Edge cases', () => {
    it('should handle missing search results gracefully', () => {
      const text = 'Text with citation \\ue202turn0search0 but no data.';

      const { result } = renderHook(() =>
        useCopyToClipboard({
          text,
          searchResults: {},
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      // Updated expectation: Citation marker should be removed
      expect(mockCopy).toHaveBeenCalledWith('Text with citation but no data.', {
        format: 'text/plain',
      });
    });

    it('should handle invalid citation indices', () => {
      const mockSearchResults: { [key: string]: SearchResultData } = {
        '0': {
          organic: [
            {
              link: 'https://example.com/search1',
              title: 'Search Result 1',
            },
          ],
        },
      };

      const text = 'Valid citation \\ue202turn0search0 and invalid \\ue202turn0search5.';

      const { result } = renderHook(() =>
        useCopyToClipboard({
          text,
          searchResults: mockSearchResults,
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      // Updated expectation: Invalid citation marker should be removed
      const expectedText = `Valid citation [1] and invalid.

Citations:
[1] https://example.com/search1
`;

      expect(mockCopy).toHaveBeenCalledWith(expectedText, { format: 'text/plain' });
    });

    it('should handle citations without links', () => {
      const mockSearchResults: { [key: string]: SearchResultData } = {
        '0': {
          organic: [
            {
              title: 'No link source',
              // No link property
            } as ProcessedOrganic,
          ],
        },
      };

      const text = 'Citation without link \\ue202turn0search0.';

      const { result } = renderHook(() =>
        useCopyToClipboard({
          text,
          searchResults: mockSearchResults,
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      // Updated expectation: Citation marker without link should be removed
      expect(mockCopy).toHaveBeenCalledWith('Citation without link.', {
        format: 'text/plain',
      });
    });

    it('should clean up orphaned citation lists at the end', () => {
      const mockSearchResults: { [key: string]: SearchResultData } = {
        '0': {
          organic: [
            { link: 'https://example.com/1', title: 'Source 1' },
            { link: 'https://example.com/2', title: 'Source 2' },
          ],
        },
      };

      const text = 'Text with citations \\ue202turn0search0.\n\n[1][2]';

      const { result } = renderHook(() =>
        useCopyToClipboard({
          text,
          searchResults: mockSearchResults,
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      const expectedText = `Text with citations [1].

Citations:
[1] https://example.com/1
`;

      expect(mockCopy).toHaveBeenCalledWith(expectedText, { format: 'text/plain' });
    });
  });

  describe('All citation types', () => {
    const mockSearchResults: { [key: string]: SearchResultData } = {
      '0': {
        organic: [{ link: 'https://example.com/search', title: 'Search' }],
        topStories: [{ link: 'https://example.com/news', title: 'News' }],
        images: [{ link: 'https://example.com/image', title: 'Image' }],
        videos: [{ link: 'https://example.com/video', title: 'Video' }],
        references: [{ link: 'https://example.com/ref', title: 'Reference', type: 'link' }],
      },
    };

    it('should handle all citation types correctly', () => {
      const text =
        'Search \\ue202turn0search0, news \\ue202turn0news0, image \\ue202turn0image0, video \\ue202turn0video0, ref \\ue202turn0ref0.';

      const { result } = renderHook(() =>
        useCopyToClipboard({
          text,
          searchResults: mockSearchResults,
        }),
      );

      act(() => {
        result.current(mockSetIsCopied);
      });

      const expectedText = `Search [1], news [2], image [3], video [4], ref [5].

Citations:
[1] https://example.com/search
[2] https://example.com/news
[3] https://example.com/image
|
||||
[4] https://example.com/video
|
||||
[5] https://example.com/ref
|
||||
`;
|
||||
|
||||
expect(mockCopy).toHaveBeenCalledWith(expectedText, { format: 'text/plain' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('Complex scenarios', () => {
|
||||
it('should handle mixed highlighted text and composite citations', () => {
|
||||
const mockSearchResults: { [key: string]: SearchResultData } = {
|
||||
'0': {
|
||||
organic: [
|
||||
{ link: 'https://example.com/1', title: 'Source 1' },
|
||||
{ link: 'https://example.com/2', title: 'Source 2' },
|
||||
],
|
||||
topStories: [{ link: 'https://example.com/3', title: 'News 1' }],
|
||||
},
|
||||
};
|
||||
|
||||
const text =
|
||||
'\\ue203Highlighted text with citation\\ue204 \\ue202turn0search0 and composite \\ue200\\ue202turn0search1\\ue202turn0news0\\ue201.';
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useCopyToClipboard({
|
||||
text,
|
||||
searchResults: mockSearchResults,
|
||||
}),
|
||||
);
|
||||
|
||||
act(() => {
|
||||
result.current(mockSetIsCopied);
|
||||
});
|
||||
|
||||
const expectedText = `**Highlighted text with citation** [1] and composite [2][3].
|
||||
|
||||
Citations:
|
||||
[1] https://example.com/1
|
||||
[2] https://example.com/2
|
||||
[3] https://example.com/3
|
||||
`;
|
||||
|
||||
expect(mockCopy).toHaveBeenCalledWith(expectedText, { format: 'text/plain' });
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -1,13 +1,41 @@
import { useCallback, useEffect, useRef } from 'react';
import copy from 'copy-to-clipboard';
import { ContentTypes } from 'librechat-data-provider';
import { ContentTypes, SearchResultData } from 'librechat-data-provider';
import type { TMessage } from 'librechat-data-provider';
import {
  SPAN_REGEX,
  CLEANUP_REGEX,
  COMPOSITE_REGEX,
  STANDALONE_PATTERN,
  INVALID_CITATION_REGEX,
} from '~/utils/citations';

type Source = {
  link: string;
  title: string;
  attribution?: string;
  type: string;
  typeIndex: number;
  citationKey: string; // Used for deduplication
};

const refTypeMap: Record<string, string> = {
  search: 'organic',
  ref: 'references',
  news: 'topStories',
  image: 'images',
  video: 'videos',
};

export default function useCopyToClipboard({
  text,
  content,
}: Partial<Pick<TMessage, 'text' | 'content'>>) {
  searchResults,
}: Partial<Pick<TMessage, 'text' | 'content'>> & {
  searchResults?: { [key: string]: SearchResultData };
}) {
  const copyTimeoutRef = useRef<NodeJS.Timeout | null>(null);

  useEffect(() => {
    return () => {
      if (copyTimeoutRef.current) {
@@ -22,6 +50,8 @@ export default function useCopyToClipboard({
        clearTimeout(copyTimeoutRef.current);
      }
      setIsCopied(true);

      // Get the message text from content or text
      let messageText = text ?? '';
      if (content) {
        messageText = content.reduce((acc, curr, i) => {
@@ -32,14 +62,282 @@ export default function useCopyToClipboard({
          return acc;
        }, '');
      }
      copy(messageText, { format: 'text/plain' });

      // Early return if no search data
      if (!searchResults || Object.keys(searchResults).length === 0) {
        // Clean up any citation markers before returning
        const cleanedText = messageText
          .replace(INVALID_CITATION_REGEX, '')
          .replace(CLEANUP_REGEX, '');

        copy(cleanedText, { format: 'text/plain' });
        copyTimeoutRef.current = setTimeout(() => {
          setIsCopied(false);
        }, 3000);
        return;
      }

      // Process citations and build a citation manager
      const citationManager = processCitations(messageText, searchResults);
      let processedText = citationManager.formattedText;

      // Add citations list at the end if we have any
      if (citationManager.citations.size > 0) {
        processedText += '\n\nCitations:\n';
        // Sort citations by their reference number
        const sortedCitations = Array.from(citationManager.citations.entries()).sort(
          (a, b) => a[1].referenceNumber - b[1].referenceNumber,
        );

        // Add each citation to the text
        for (const [_, citation] of sortedCitations) {
          processedText += `[${citation.referenceNumber}] ${citation.link}\n`;
        }
      }

      copy(processedText, { format: 'text/plain' });
      copyTimeoutRef.current = setTimeout(() => {
        setIsCopied(false);
      }, 3000);
    },
    [text, content],
    [text, content, searchResults],
  );

  return copyToClipboard;
}

/**
 * Process citations in the text and format them properly
 */
function processCitations(text: string, searchResults: { [key: string]: SearchResultData }) {
  // Maps citation keys to their info including reference numbers
  const citations = new Map<
    string,
    {
      referenceNumber: number;
      link: string;
      title?: string;
      source: Source;
    }
  >();

  // Map to track URLs to citation keys for deduplication
  const urlToCitationKey = new Map<string, string>();

  let nextReferenceNumber = 1;
  let formattedText = text;

  // Step 1: Process highlighted text first (simplify by just making it bold in markdown)
  formattedText = formattedText.replace(SPAN_REGEX, (match) => {
    const text = match.replace(/\\ue203|\\ue204/g, '');
    return `**${text}**`;
  });

  // Step 2: Find all standalone citations and composite citation blocks
  const allCitations: Array<{
    turn: string;
    type: string;
    index: string;
    position: number;
    fullMatch: string;
    isComposite: boolean;
  }> = [];

  // Find standalone citations
  let standaloneMatch: RegExpExecArray | null;
  const standaloneCopy = new RegExp(STANDALONE_PATTERN.source, 'g');
  while ((standaloneMatch = standaloneCopy.exec(formattedText)) !== null) {
    allCitations.push({
      turn: standaloneMatch[1],
      type: standaloneMatch[2],
      index: standaloneMatch[3],
      position: standaloneMatch.index,
      fullMatch: standaloneMatch[0],
      isComposite: false,
    });
  }

  // Find composite citation blocks
  let compositeMatch: RegExpExecArray | null;
  const compositeCopy = new RegExp(COMPOSITE_REGEX.source, 'g');
  while ((compositeMatch = compositeCopy.exec(formattedText)) !== null) {
    const block = compositeMatch[0];
    const blockStart = compositeMatch.index;

    // Extract individual citations within the composite block
    let citationMatch: RegExpExecArray | null;
    const citationPattern = new RegExp(STANDALONE_PATTERN.source, 'g');
    while ((citationMatch = citationPattern.exec(block)) !== null) {
      allCitations.push({
        turn: citationMatch[1],
        type: citationMatch[2],
        index: citationMatch[3],
        position: blockStart + citationMatch.index,
        fullMatch: block, // Store the full composite block
        isComposite: true,
      });
    }
  }

  // Sort citations by their position in the text
  allCitations.sort((a, b) => a.position - b.position);

  // Step 3: Process each citation and build the reference mapping
  const processedCitations = new Set<string>();
  const replacements: Array<[string, string]> = [];
  const compositeCitationsMap = new Map<string, number[]>();

  for (const citation of allCitations) {
    const { turn, type, index, fullMatch, isComposite } = citation;
    const searchData = searchResults[turn];

    if (!searchData) continue;

    const dataType = refTypeMap[type.toLowerCase()] || type.toLowerCase();
    const idx = parseInt(index, 10);

    // Skip if no matching data
    if (!searchData[dataType] || !searchData[dataType][idx]) {
      continue;
    }

    // Get source data
    const sourceData = searchData[dataType][idx];
    const sourceUrl = sourceData.link || '';

    // Skip if no link
    if (!sourceUrl) continue;

    // Check if this URL has already been cited
    let citationKey = urlToCitationKey.get(sourceUrl);

    // If not, create a new citation key
    if (!citationKey) {
      citationKey = `${turn}-${dataType}-${idx}`;
      urlToCitationKey.set(sourceUrl, citationKey);
    }

    const source: Source = {
      link: sourceUrl,
      title: sourceData.title || sourceData.name || '',
      attribution: sourceData.attribution || sourceData.source || '',
      type: dataType,
      typeIndex: idx,
      citationKey,
    };

    // Skip if already processed this citation in a composite block
    if (isComposite && processedCitations.has(fullMatch)) {
      continue;
    }

    let referenceText = '';

    // Check if this source has been cited before
    let existingCitation = citations.get(citationKey);

    if (!existingCitation) {
      // New citation
      existingCitation = {
        referenceNumber: nextReferenceNumber++,
        link: source.link,
        title: source.title,
        source,
      };
      citations.set(citationKey, existingCitation);
    }

    if (existingCitation) {
      // For composite blocks, we need to find all citations and create a combined reference
      if (isComposite) {
        // Parse all citations in this composite block if we haven't processed it yet
        if (!processedCitations.has(fullMatch)) {
          const compositeCitations: number[] = [];
          let citationMatch: RegExpExecArray | null;
          const citationPattern = new RegExp(STANDALONE_PATTERN.source, 'g');

          while ((citationMatch = citationPattern.exec(fullMatch)) !== null) {
            const cTurn = citationMatch[1];
            const cType = citationMatch[2];
            const cIndex = citationMatch[3];
            const cDataType = refTypeMap[cType.toLowerCase()] || cType.toLowerCase();

            const cSource = searchResults[cTurn]?.[cDataType]?.[parseInt(cIndex, 10)];
            if (cSource && cSource.link) {
              // Check if we've already created a citation for this URL
              const cUrl = cSource.link;
              let cKey = urlToCitationKey.get(cUrl);

              if (!cKey) {
                cKey = `${cTurn}-${cDataType}-${cIndex}`;
                urlToCitationKey.set(cUrl, cKey);
              }

              let cCitation = citations.get(cKey);

              if (!cCitation) {
                cCitation = {
                  referenceNumber: nextReferenceNumber++,
                  link: cSource.link,
                  title: cSource.title || cSource.name || '',
                  source: {
                    link: cSource.link,
                    title: cSource.title || cSource.name || '',
                    attribution: cSource.attribution || cSource.source || '',
                    type: cDataType,
                    typeIndex: parseInt(cIndex, 10),
                    citationKey: cKey,
                  },
                };
                citations.set(cKey, cCitation);
              }

              if (cCitation) {
                compositeCitations.push(cCitation.referenceNumber);
              }
            }
          }

          // Sort and deduplicate the composite citations
          const uniqueSortedCitations = [...new Set(compositeCitations)].sort((a, b) => a - b);

          // Create combined reference numbers for all citations in this composite
          referenceText =
            uniqueSortedCitations.length > 0
              ? uniqueSortedCitations.map((num) => `[${num}]`).join('')
              : '';

          processedCitations.add(fullMatch);
          compositeCitationsMap.set(fullMatch, uniqueSortedCitations);
          replacements.push([fullMatch, referenceText]);
        }

        // Skip further processing since we've handled the entire composite block
        continue;
      } else {
        // Single citation
        referenceText = `[${existingCitation.referenceNumber}]`;
        replacements.push([fullMatch, referenceText]);
      }
    }
  }

  // Step 4: Apply all replacements (from longest to shortest to avoid nested replacement issues)
  replacements.sort((a, b) => b[0].length - a[0].length);
  for (const [pattern, replacement] of replacements) {
    formattedText = formattedText.replace(pattern, replacement);
  }

  // Step 5: Remove any orphaned composite blocks at the end of the text
  // This prevents the [1][2][3][4] list that might appear at the end if there's a composite there
  formattedText = formattedText.replace(/\n\s*\[\d+\](\[\d+\])*\s*$/g, '');

  // Step 6: Clean up any remaining citation markers
  formattedText = formattedText.replace(INVALID_CITATION_REGEX, '');
  formattedText = formattedText.replace(CLEANUP_REGEX, '');

  return {
    formattedText,
    citations,
  };
}

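Note (reviewer sketch, not part of the diff): a minimal example of how a message component might consume the updated hook, assuming React's useState is in scope and that `text`, `content`, and the per-turn `searchResults` map are already available; the handler name is illustrative.

  // Copies the message with markdown bold for highlights and an appended "Citations:" list
  const copyToClipboard = useCopyToClipboard({ text, content, searchResults });
  const [isCopied, setIsCopied] = useState(false);
  // Passing the state setter mirrors how the tests invoke the returned callback
  const handleCopy = () => copyToClipboard(setIsCopied);
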
@@ -1,6 +1,7 @@
import { useRecoilValue } from 'recoil';
import { useCallback, useMemo } from 'react';
import { isAssistantsEndpoint, isAgentsEndpoint } from 'librechat-data-provider';
import type { SearchResultData } from 'librechat-data-provider';
import type { TMessageProps } from '~/common';
import {
  useChatContext,
@@ -18,12 +19,14 @@ export type TMessageActions = Pick<
  'message' | 'currentEditId' | 'setCurrentEditId'
> & {
  isMultiMessage?: boolean;
  searchResults?: { [key: string]: SearchResultData };
};

export default function useMessageActions(props: TMessageActions) {
  const localize = useLocalize();
  const { user } = useAuthContext();
  const UsernameDisplay = useRecoilValue<boolean>(store.UsernameDisplay);
  const { message, currentEditId, setCurrentEditId, isMultiMessage } = props;
  const { message, currentEditId, setCurrentEditId, isMultiMessage, searchResults } = props;

  const {
    ask,
@@ -96,7 +99,7 @@ export default function useMessageActions(props: TMessageActions) {
    regenerate(message);
  }, [isSubmitting, isCreatedByUser, message, regenerate]);

  const copyToClipboard = useCopyToClipboard({ text, content });
  const copyToClipboard = useCopyToClipboard({ text, content, searchResults });

  const messageLabel = useMemo(() => {
    if (message?.isCreatedByUser === true) {

@@ -3,12 +3,7 @@ import { useParams } from 'react-router-dom';
import { Constants, EModelEndpoint } from 'librechat-data-provider';
import { useGetModelsQuery } from 'librechat-data-provider/react-query';
import type { TPreset } from 'librechat-data-provider';
import {
  useHealthCheck,
  useGetConvoIdQuery,
  useGetStartupConfig,
  useGetEndpointsQuery,
} from '~/data-provider';
import { useGetConvoIdQuery, useGetStartupConfig, useGetEndpointsQuery } from '~/data-provider';
import { useNewConvo, useAppStartup, useAssistantListMap, useIdChangeEffect } from '~/hooks';
import { getDefaultModelSpec, getModelSpecPreset, logger } from '~/utils';
import { ToolCallsMapProvider } from '~/Providers';
@@ -20,9 +15,9 @@ import { useRecoilCallback } from 'recoil';
import store from '~/store';

export default function ChatRoute() {
  useHealthCheck();
  const { data: startupConfig } = useGetStartupConfig();
  const { isAuthenticated, user } = useAuthRedirect();

  const setIsTemporary = useRecoilCallback(
    ({ set }) =>
      (value: boolean) => {
@@ -49,6 +44,16 @@ export default function ChatRoute() {
  const endpointsQuery = useGetEndpointsQuery({ enabled: isAuthenticated });
  const assistantListMap = useAssistantListMap();

  const isTemporaryChat = conversation && conversation.expiredAt ? true : false;

  useEffect(() => {
    if (conversationId !== Constants.NEW_CONVO && !isTemporaryChat) {
      setIsTemporary(false);
    } else if (isTemporaryChat) {
      setIsTemporary(isTemporaryChat);
    }
  }, [conversationId, isTemporaryChat, setIsTemporary]);

  /** This effect is mainly for the first conversation state change on first load of the page.
   * Adjusting this may have unintended consequences on the conversation state.
   */
@@ -107,6 +112,7 @@ export default function ChatRoute() {
      hasSetConversation.current = true;
    }
    /* Creates infinite render if all dependencies included due to newConversation invocations exceeding call stack before hasSetConversation.current becomes truthy */
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [
    startupConfig,
    initialConvoQuery.data,
@@ -140,14 +146,6 @@ export default function ChatRoute() {
    return null;
  }

  const isTemporaryChat = conversation && conversation.expiredAt ? true : false;

  if (conversationId !== Constants.NEW_CONVO && !isTemporaryChat) {
    setIsTemporary(false);
  } else if (isTemporaryChat) {
    setIsTemporary(isTemporaryChat);
  }

  return (
    <ToolCallsMapProvider conversationId={conversation.conversationId ?? ''}>
      <ChatView index={index} />

@@ -17,6 +17,7 @@ import {
import TermsAndConditionsModal from '~/components/ui/TermsAndConditionsModal';
import { useUserTermsQuery, useGetStartupConfig } from '~/data-provider';
import { Nav, MobileNav } from '~/components/Nav';
import { useHealthCheck } from '~/data-provider';
import { Banner } from '~/components/Banners';

export default function Root() {
@@ -28,6 +29,10 @@ export default function Root() {
  });

  const { isAuthenticated, logout } = useAuthContext();

  // Global health check - runs once per authenticated session
  useHealthCheck(isAuthenticated);

  const assistantsMap = useAssistantsMap({ isAuthenticated });
  const agentsMap = useAgentsMap({ isAuthenticated });
  const fileMap = useFileMap({ isAuthenticated });

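Note (reviewer sketch, not part of the diff): the Root change only passes the auth flag down; the gating itself lives in `useHealthCheck`. A rough sketch of that pattern, with the hook name, endpoint, and interval assumed for illustration rather than taken from the actual implementation:

  import { useEffect } from 'react';

  // Hypothetical auth-gated polling hook: do nothing until authenticated,
  // then ping periodically and again whenever the window regains focus.
  function useHealthCheckSketch(isAuthenticated: boolean) {
    useEffect(() => {
      if (!isAuthenticated) {
        return;
      }
      const ping = () => fetch('/health').catch(() => undefined); // assumed endpoint
      ping(); // run once immediately
      const id = setInterval(ping, 5 * 60 * 1000); // assumed interval
      window.addEventListener('focus', ping);
      return () => {
        clearInterval(id);
        window.removeEventListener('focus', ping);
      };
    }, [isAuthenticated]);
  }
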
5
client/src/utils/citations.ts
Normal file
@@ -0,0 +1,5 @@
export const SPAN_REGEX = /(\\ue203.*?\\ue204)/g;
export const COMPOSITE_REGEX = /(\\ue200.*?\\ue201)/g;
export const STANDALONE_PATTERN = /\\ue202turn(\d+)(search|image|news|video|ref)(\d+)/g;
export const CLEANUP_REGEX = /\\ue200|\\ue201|\\ue202|\\ue203|\\ue204|\\ue206/g;
export const INVALID_CITATION_REGEX = /\s*\\ue202turn\d+(search|news|image|video|ref)\d+/g;

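Note (reviewer sketch, not part of the diff): how a standalone marker is expected to decompose against these patterns, assuming a `markerText` string containing one citation marker and the `refTypeMap`/`searchResults` shapes used in the hook above:

  // '…turn0search0' -> capture groups: turn '0', type 'search', index '0'
  const m = new RegExp(STANDALONE_PATTERN.source).exec(markerText);
  if (m) {
    const [, turn, type, idx] = m;
    const bucket = refTypeMap[type] ?? type; // e.g. 'search' -> 'organic', 'news' -> 'topStories'
    const source = searchResults[turn]?.[bucket]?.[Number(idx)];
    // source?.link is what ends up as a "[n] https://…" line in the copied Citations list
  }
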
@@ -38,7 +38,7 @@ export default defineConfig({
      useCredentials: true,
      workbox: {
        globPatterns: ['**/*'],
        globIgnores: ['images/**/*'],
        globIgnores: ['images/**/*', '**/*.map'],
        maximumFileSizeToCacheInBytes: 4 * 1024 * 1024,
        navigateFallbackDenylist: [/^\/oauth/],
      },
@@ -96,26 +96,54 @@ export default defineConfig({
      output: {
        manualChunks(id: string) {
          if (id.includes('node_modules')) {
            // Group Radix UI libraries together.
            // High-impact chunking for large libraries
            if (id.includes('@codesandbox/sandpack')) {
              return 'sandpack';
            }
            if (id.includes('react-virtualized')) {
              return 'virtualization';
            }
            if (id.includes('i18next') || id.includes('react-i18next')) {
              return 'i18n';
            }
            if (id.includes('lodash')) {
              return 'utilities';
            }
            if (id.includes('date-fns')) {
              return 'date-utils';
            }
            if (id.includes('@dicebear')) {
              return 'avatars';
            }
            if (id.includes('react-dnd') || id.includes('react-flip-toolkit')) {
              return 'react-interactions';
            }
            if (id.includes('react-hook-form')) {
              return 'forms';
            }
            if (id.includes('react-router-dom')) {
              return 'routing';
            }
            if (id.includes('qrcode.react') || id.includes('@marsidev/react-turnstile')) {
              return 'security-ui';
            }

            // Existing chunks
            if (id.includes('@radix-ui')) {
              return 'radix-ui';
            }
            // Group framer-motion separately.
            if (id.includes('framer-motion')) {
              return 'framer-motion';
            }
            // Group markdown-related libraries.
            if (id.includes('node_modules/highlight.js')) {
              return 'markdown_highlight';
            }
            if (id.includes('node_modules/hast-util-raw') || id.includes('node_modules/katex')) {
              return 'markdown_large';
            }
            // Group TanStack libraries together.
            if (id.includes('@tanstack')) {
              return 'tanstack-vendor';
            }
            // Additional grouping for other node_modules:
            if (id.includes('@headlessui')) {
              return 'headlessui';
            }
@@ -133,7 +161,7 @@ export default defineConfig({
        entryFileNames: 'assets/[name].[hash].js',
        chunkFileNames: 'assets/[name].[hash].js',
        assetFileNames: (assetInfo) => {
          if (assetInfo.names && /\.(woff|woff2|eot|ttf|otf)$/.test(assetInfo.names)) {
          if (assetInfo.names?.[0] && /\.(woff|woff2|eot|ttf|otf)$/.test(assetInfo.names[0])) {
            return 'assets/fonts/[name][extname]';
          }
          return 'assets/[name].[hash][extname]';
@@ -150,12 +178,12 @@ export default defineConfig({
        warn(warning);
      },
    },
    chunkSizeWarningLimit: 1200,
    chunkSizeWarningLimit: 1500,
  },
  resolve: {
    alias: {
      '~': path.join(__dirname, 'src/'),
      $fonts: resolve('public/fonts'),
      $fonts: '/fonts',
    },
  },
});

@@ -6,6 +6,7 @@ import type {
  TWebSearchConfig,
} from '../src/config';
import { webSearchAuth, loadWebSearchAuth, extractWebSearchEnvVars } from '../src/web';
import { SafeSearchTypes } from '../src/config';
import { AuthType } from '../src/schemas';

// Mock the extractVariableName function
@@ -33,7 +34,7 @@ describe('web.ts', () => {
        serperApiKey: '${SERPER_API_KEY}',
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: 'actual-api-key', // Not in env var format
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
      };

      const result = extractWebSearchEnvVars({
@@ -48,7 +49,7 @@ describe('web.ts', () => {
      const config: Partial<TWebSearchConfig> = {
        serperApiKey: '${SERPER_API_KEY}',
        // firecrawlApiKey is missing
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
      };

      const result = extractWebSearchEnvVars({
@@ -80,7 +81,7 @@ describe('web.ts', () => {
      firecrawlApiUrl: '${FIRECRAWL_API_URL}',
      jinaApiKey: '${JINA_API_KEY}',
      cohereApiKey: '${COHERE_API_KEY}',
      safeSearch: true,
      safeSearch: SafeSearchTypes.MODERATE,
    };
  });

@@ -250,8 +251,11 @@ describe('web.ts', () => {
        return Promise.resolve(result);
      });

      // Test with safeSearch: false
      const configWithSafeSearchOff = { ...webSearchConfig, safeSearch: false } as TWebSearchConfig;
      // Test with safeSearch: OFF
      const configWithSafeSearchOff = {
        ...webSearchConfig,
        safeSearch: SafeSearchTypes.OFF,
      } as TWebSearchConfig;

      const result = await loadWebSearchAuth({
        userId,
@@ -259,7 +263,7 @@ describe('web.ts', () => {
        loadAuthValues: mockLoadAuthValues,
      });

      expect(result.authResult).toHaveProperty('safeSearch', false);
      expect(result.authResult).toHaveProperty('safeSearch', SafeSearchTypes.OFF);
    });

    it('should set the correct service types in authResult', async () => {
@@ -294,7 +298,7 @@ describe('web.ts', () => {
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: '${COHERE_API_KEY}',
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
      };

      // Mock successful authentication
@@ -343,7 +347,7 @@ describe('web.ts', () => {
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: '${COHERE_API_KEY}',
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
        // Specify which services to use
        searchProvider: 'serper' as SearchProviders,
        scraperType: 'firecrawl' as ScraperTypes,
@@ -432,7 +436,7 @@ describe('web.ts', () => {
        firecrawlApiUrl: '${CUSTOM_FIRECRAWL_URL}',
        jinaApiKey: '${CUSTOM_JINA_KEY}',
        cohereApiKey: '${CUSTOM_COHERE_KEY}',
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
        // Specify which services to use
        searchProvider: 'serper' as SearchProviders,
        scraperType: 'firecrawl' as ScraperTypes,
@@ -500,7 +504,7 @@ describe('web.ts', () => {
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: '${COHERE_API_KEY}',
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
      };

      // Mock loadAuthValues to return values
@@ -559,7 +563,7 @@ describe('web.ts', () => {
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: '${COHERE_API_KEY}',
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
      };

      // Mock loadAuthValues to return partial values
@@ -666,7 +670,7 @@ describe('web.ts', () => {
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: '${COHERE_API_KEY}',
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
        searchProvider: 'serper' as SearchProviders,
      };

@@ -704,7 +708,7 @@ describe('web.ts', () => {
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: '${COHERE_API_KEY}',
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
        scraperType: 'firecrawl' as ScraperTypes,
      };

@@ -742,7 +746,7 @@ describe('web.ts', () => {
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: '${COHERE_API_KEY}',
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
        rerankerType: 'jina' as RerankerTypes,
      };

@@ -786,7 +790,7 @@ describe('web.ts', () => {
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: '${COHERE_API_KEY}',
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
        searchProvider: 'invalid-provider' as SearchProviders,
      };

@@ -818,7 +822,7 @@ describe('web.ts', () => {
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: '${COHERE_API_KEY}',
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
        rerankerType: 'jina' as RerankerTypes,
      };

@@ -866,7 +870,7 @@ describe('web.ts', () => {
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: '${COHERE_API_KEY}',
        safeSearch: true,
        safeSearch: SafeSearchTypes.MODERATE,
      };

      // Mock successful authentication

@@ -604,6 +604,12 @@ export enum RerankerTypes {
  COHERE = 'cohere',
}

export enum SafeSearchTypes {
  OFF = 0,
  MODERATE = 1,
  STRICT = 2,
}

export const webSearchSchema = z.object({
  serperApiKey: z.string().optional().default('${SERPER_API_KEY}'),
  firecrawlApiKey: z.string().optional().default('${FIRECRAWL_API_KEY}'),
@@ -613,7 +619,8 @@ export const webSearchSchema = z.object({
  searchProvider: z.nativeEnum(SearchProviders).optional(),
  scraperType: z.nativeEnum(ScraperTypes).optional(),
  rerankerType: z.nativeEnum(RerankerTypes).optional(),
  safeSearch: z.boolean().default(true),
  scraperTimeout: z.number().optional(),
  safeSearch: z.nativeEnum(SafeSearchTypes).default(SafeSearchTypes.MODERATE),
});

export type TWebSearchConfig = z.infer<typeof webSearchSchema>;

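Note (reviewer sketch, not part of the diff): what the new schema defaults yield for an empty webSearch block, assuming the remaining fields keep their existing optional/default behavior:

  const parsed = webSearchSchema.parse({});
  // parsed.safeSearch === SafeSearchTypes.MODERATE (numeric value 1)
  // parsed.scraperTimeout === undefined; loadWebSearchAuth later falls back to 7500 ms
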
@@ -6,7 +6,7 @@ import type {
  TWebSearchConfig,
} from './config';
import { extractVariableName } from './utils';
import { SearchCategories } from './config';
import { SearchCategories, SafeSearchTypes } from './config';
import { AuthType } from './schemas';

export function loadWebSearchConfig(
@@ -17,7 +17,7 @@ export function loadWebSearchConfig(
  const firecrawlApiUrl = config?.firecrawlApiUrl ?? '${FIRECRAWL_API_URL}';
  const jinaApiKey = config?.jinaApiKey ?? '${JINA_API_KEY}';
  const cohereApiKey = config?.cohereApiKey ?? '${COHERE_API_KEY}';
  const safeSearch = config?.safeSearch ?? true;
  const safeSearch = config?.safeSearch ?? SafeSearchTypes.MODERATE;

  return {
    ...config,
@@ -260,7 +260,8 @@ export async function loadWebSearchAuth({
    authTypes.push([category, isUserProvided ? AuthType.USER_PROVIDED : AuthType.SYSTEM_DEFINED]);
  }

  authResult.safeSearch = webSearchConfig?.safeSearch ?? true;
  authResult.safeSearch = webSearchConfig?.safeSearch ?? SafeSearchTypes.MODERATE;
  authResult.scraperTimeout = webSearchConfig?.scraperTimeout ?? 7500;

  return {
    authTypes,