🔍 feat: Add Serper as Scraper Provider and Firecrawl Version Support (#9984)

* 🔧 chore: Update @librechat/agents to v2.4.84 in package.json and package-lock.json

* feat: Add Serper as a new scraperProvider for Web Search and add firecrawlVersion support

* fix: TWebSearchKeys and ensure unique API key extraction

* chore: Add build:packages script to streamline package builds
Danny Avila 2025-10-05 20:34:05 -04:00 committed by GitHub
parent 857c054a9a
commit 31a283a4fe
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
14 changed files with 247 additions and 47 deletions
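
For orientation before the diff: a minimal sketch of the webSearch block this change targets, assembled from the commit message and the spec assertions below. The `firecrawlVersion` value and the idea of pointing `scraperProvider` at 'serper' come from the commit title; treat the exact accepted values as assumptions rather than documented options.

// Hypothetical sketch only — field names are inferred from the commit message, the
// test config, and the env vars referenced in the diff below; values are
// illustrative placeholders, not taken from this PR.
const webSearchConfig = {
  serperApiKey: '${SERPER_API_KEY}',
  firecrawlApiKey: '${FIRECRAWL_API_KEY}',
  firecrawlApiUrl: '${FIRECRAWL_API_URL}',
  firecrawlVersion: 'v2', // new in this commit; example value is an assumption
  jinaApiKey: '${JINA_API_KEY}',
  cohereApiKey: '${COHERE_API_KEY}',
  searchProvider: 'serper',
  // Renamed from `scraperType`; per the commit title, 'serper' should now also be
  // accepted here alongside 'firecrawl'.
  scraperProvider: 'firecrawl',
  rerankerType: 'jina',
};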


@@ -1,11 +1,11 @@
import { webSearchAuth } from '@librechat/data-schemas';
import { SafeSearchTypes, AuthType } from 'librechat-data-provider';
import type {
ScraperTypes,
ScraperProviders,
TWebSearchConfig,
SearchProviders,
TCustomConfig,
RerankerTypes,
SearchProviders,
TWebSearchConfig,
} from 'librechat-data-provider';
import { loadWebSearchAuth, extractWebSearchEnvVars } from './web';
@@ -119,7 +119,7 @@ describe('web.ts', () => {
}
expect(result.authResult).toHaveProperty('searchProvider', 'serper');
expect(result.authResult).toHaveProperty('scraperType', 'firecrawl');
expect(result.authResult).toHaveProperty('scraperProvider', 'firecrawl');
expect(['jina', 'cohere']).toContain(result.authResult.rerankerType as string);
});
@@ -288,7 +288,7 @@ describe('web.ts', () => {
// Check that the correct service types are set
expect(result.authResult.searchProvider).toBe('serper' as SearchProviders);
expect(result.authResult.scraperType).toBe('firecrawl' as ScraperTypes);
expect(result.authResult.scraperProvider).toBe('firecrawl' as ScraperProviders);
// One of the rerankers should be set
expect(['jina', 'cohere']).toContain(result.authResult.rerankerType as string);
});
@@ -330,7 +330,7 @@ describe('web.ts', () => {
// Should have set values for all categories
expect(result.authResult.searchProvider).toBeDefined();
expect(result.authResult.scraperType).toBeDefined();
expect(result.authResult.scraperProvider).toBeDefined();
expect(result.authResult.rerankerType).toBeDefined();
});
@@ -359,7 +359,7 @@ describe('web.ts', () => {
safeSearch: SafeSearchTypes.MODERATE,
// Specify which services to use
searchProvider: 'serper' as SearchProviders,
scraperType: 'firecrawl' as ScraperTypes,
scraperProvider: 'firecrawl' as ScraperProviders,
rerankerType: 'jina' as RerankerTypes,
};
@@ -394,7 +394,7 @@ describe('web.ts', () => {
expect(result.authResult).toHaveProperty('firecrawlApiUrl');
expect(result.authResult).toHaveProperty('jinaApiKey');
expect(result.authResult).toHaveProperty('searchProvider');
expect(result.authResult).toHaveProperty('scraperType');
expect(result.authResult).toHaveProperty('scraperProvider');
expect(result.authResult).toHaveProperty('rerankerType');
expect(result.authenticated).toBe(true);
@@ -419,7 +419,7 @@ describe('web.ts', () => {
expect(result.authResult).toHaveProperty('firecrawlApiUrl', 'https://api.firecrawl.dev');
expect(result.authResult).toHaveProperty('jinaApiKey', 'system-jina-key');
expect(result.authResult).toHaveProperty('searchProvider', 'serper');
expect(result.authResult).toHaveProperty('scraperType', 'firecrawl');
expect(result.authResult).toHaveProperty('scraperProvider', 'firecrawl');
expect(result.authResult).toHaveProperty('rerankerType', 'jina');
// Restore original env
@@ -452,7 +452,7 @@ describe('web.ts', () => {
safeSearch: SafeSearchTypes.MODERATE,
// Specify which services to use
searchProvider: 'serper' as SearchProviders,
scraperType: 'firecrawl' as ScraperTypes,
scraperProvider: 'firecrawl' as ScraperProviders,
rerankerType: 'jina' as RerankerTypes, // Only Jina will be checked
};
@@ -492,7 +492,7 @@ describe('web.ts', () => {
// Verify the service types are set correctly
expect(result.authResult).toHaveProperty('searchProvider', 'serper');
expect(result.authResult).toHaveProperty('scraperType', 'firecrawl');
expect(result.authResult).toHaveProperty('scraperProvider', 'firecrawl');
expect(result.authResult).toHaveProperty('rerankerType', 'jina');
// Restore original env
@@ -722,8 +722,8 @@ describe('web.ts', () => {
expect(providerCalls.length).toBe(1);
});
it('should only check the specified scraperType', async () => {
// Initialize a webSearchConfig with a specific scraperType
it('should only check the specified scraperProvider', async () => {
// Initialize a webSearchConfig with a specific scraperProvider
const webSearchConfig: TCustomConfig['webSearch'] = {
serperApiKey: '${SERPER_API_KEY}',
searxngInstanceUrl: '${SEARXNG_INSTANCE_URL}',
@@ -734,7 +734,7 @@ describe('web.ts', () => {
jinaApiUrl: '${JINA_API_URL}',
cohereApiKey: '${COHERE_API_KEY}',
safeSearch: SafeSearchTypes.MODERATE,
scraperType: 'firecrawl' as ScraperTypes,
scraperProvider: 'firecrawl' as ScraperProviders,
};
// Mock successful authentication
@@ -754,7 +754,7 @@ describe('web.ts', () => {
});
expect(result.authenticated).toBe(true);
expect(result.authResult.scraperType).toBe('firecrawl');
expect(result.authResult.scraperProvider).toBe('firecrawl');
// Verify that only FIRECRAWL_API_KEY and FIRECRAWL_API_URL were requested for the scrapers category
const scraperCalls = mockLoadAuthValues.mock.calls.filter((call) =>
@@ -933,7 +933,7 @@ describe('web.ts', () => {
// Should have set values for all categories
expect(result.authResult.searchProvider).toBeDefined();
expect(result.authResult.scraperType).toBeDefined();
expect(result.authResult.scraperProvider).toBeDefined();
expect(result.authResult.rerankerType).toBeDefined();
});
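
For readers skimming the assertions above: a rough sketch of the result shape these tests check against, reconstructed from the expectations in the diff. Property names come from the assertions; the optionality and exact types are assumptions, not the library's actual declarations.

// Reconstructed from the test expectations above, as an illustration only.
import type { SearchProviders, ScraperProviders, RerankerTypes } from 'librechat-data-provider';

interface WebSearchAuthResultSketch {
  searchProvider?: SearchProviders; // e.g. 'serper'
  scraperProvider?: ScraperProviders; // e.g. 'firecrawl' — renamed from `scraperType` in this commit
  rerankerType?: RerankerTypes; // e.g. 'jina' or 'cohere'
  firecrawlApiUrl?: string; // e.g. 'https://api.firecrawl.dev'
  jinaApiKey?: string;
}

interface WebSearchAuthCheckSketch {
  authenticated: boolean;
  authResult: WebSearchAuthResultSketch;
}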