LibreChat/packages/mcp/src/examples/filesystem.ts
Danny Avila e391347b9e
🔧 feat: Initial MCP Support (Tools) (#5015)
* 📝 chore: Add comment to clarify purpose of check_updates.sh script

* feat: mcp package

* feat: add librechat-mcp package and update dependencies

* feat: refactor MCPConnectionSingleton to handle transport initialization and connection management

* feat: change private methods to public in MCPConnectionSingleton for improved accessibility

* feat: filesystem demo

* chore: everything demo and move everything under mcp workspace

* chore: move ts-node to mcp workspace

* feat: mcp examples

* feat: working sse MCP example

* refactor: rename MCPConnectionSingleton to MCPConnection for clarity

* refactor: replace MCPConnectionSingleton with MCPConnection for consistency

* refactor: manager/connections

* refactor: update MCPConnection to use type definitions from mcp types

* refactor: update MCPManager to use winston logger and enhance server initialization

* refactor: share logger between connections and manager

* refactor: add schema definitions and update MCPManager to accept logger parameter

* feat: map available MCP tools

* feat: load manifest tools

* feat: add MCP tools delimiter constant and update plugin key generation

* feat: call MCP tools

* feat: update librechat-data-provider version to 0.7.63 and enhance StdioOptionsSchema with additional properties

* refactor: simplify typing

* chore: update types/packages

* feat: MCP Tool Content parsing

* chore: update dependencies and improve package configurations

* feat: add 'mcp' directory to package and update configurations

* refactor: return CONTENT_AND_ARTIFACT format for MCP callTool

* chore: bump @librechat/agents

* WIP: MCP artifacts

* chore: bump @librechat/agents to v1.8.7

* fix: ensure filename has extension when saving base64 image

* fix: move base64 buffer conversion before filename extension check

* chore: update backend review workflow to install MCP package

* fix: use correct `mime` method

* fix: enhance file metadata with message and tool call IDs in image saving process

* fix: refactor ToolCall component to handle MCP tool calls and improve domain extraction

* fix: update ToolItem component for default isInstalled value and improve localization in ToolSelectDialog

* fix: update ToolItem component to use consistent text color for tool description

* style: add theming to ToolSelectDialog

* fix: improve domain extraction logic in ToolCall component

* refactor: conversation item theming, fix rename UI bug, optimize props, add missing types

* feat: enhance MCP options schema with base options (iconPath to start) and make transport type optional, infer based on other option fields

* fix: improve reconnection logic with parallel init and exponential backoff and enhance transport debug logging

* refactor: improve logging format

* refactor: improve logging of available tools by displaying tool names

* refactor: improve reconnection/connection logic

* feat: add MCP package build process to Dockerfile

* feat: add fallback icon for tools without an image in ToolItem component

* feat: Assistants Support for MCP Tools

* fix(build): configure rollup to use output.dir for dynamic imports

* chore: update @librechat/agents to version 1.8.8 and add @langchain/anthropic dependency

* fix: update CONFIG_VERSION to 1.2.0
2024-12-17 13:12:57 -05:00


#!/usr/bin/env node
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js';
import {
CallToolRequestSchema,
ListToolsRequestSchema,
ToolSchema,
} from '@modelcontextprotocol/sdk/types.js';
import fs from 'fs/promises';
import path from 'path';
import os from 'os';
import { z } from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema';
import { createTwoFilesPatch } from 'diff';
import { minimatch } from 'minimatch';
import express from 'express';
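// Path helpers: normalizePath lowercases and normalizes paths so prefix checks
// against the allow-list are case-insensitive; expandHome resolves a leading
// '~' to the current user's home directory.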
function normalizePath(p: string): string {
return path.normalize(p).toLowerCase();
}
function expandHome(filepath: string): string {
if (filepath.startsWith('~/') || filepath === '~') {
return path.join(os.homedir(), filepath.slice(1));
}
return filepath;
}
// Command line argument parsing
const args = process.argv.slice(2);
// Parse command line arguments for transport type
const transportArg = args.find((arg) => arg.startsWith('--transport='));
const portArg = args.find((arg) => arg.startsWith('--port='));
const directories = args.filter((arg) => !arg.startsWith('--'));
if (directories.length === 0) {
console.error(
'Usage: mcp-server-filesystem [--transport=stdio|sse] [--port=3000] <allowed-directory> [additional-directories...]',
);
process.exit(1);
}
// Extract transport type and port from arguments
const transport = transportArg ? (transportArg.split('=')[1] as 'stdio' | 'sse') : 'stdio';
const port = portArg ? parseInt(portArg.split('=')[1], 10) : undefined;
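// Example invocations (illustrative):
//   mcp-server-filesystem /home/user/projects
//   mcp-server-filesystem --transport=sse --port=3001 /home/user/projects /tmp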
// Store allowed directories in normalized form
const allowedDirectories = directories.map((dir) => normalizePath(path.resolve(expandHome(dir))));
// Validate that all directories exist and are accessible
await Promise.all(
directories.map(async (dir) => {
try {
const stats = await fs.stat(dir);
if (!stats.isDirectory()) {
console.error(`Error: ${dir} is not a directory`);
process.exit(1);
}
} catch (error) {
console.error(`Error accessing directory ${dir}:`, error);
process.exit(1);
}
}),
);
// Security utilities
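// validatePath resolves a requested path to an absolute path and verifies that
// it (and, for existing files, its symlink target) lies under one of the
// allowed directories. For paths that do not exist yet, the parent directory
// is validated instead so new files can still be created.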
async function validatePath(requestedPath: string): Promise<string> {
const expandedPath = expandHome(requestedPath);
const absolute = path.isAbsolute(expandedPath)
? path.resolve(expandedPath)
: path.resolve(process.cwd(), expandedPath);
const normalizedRequested = normalizePath(absolute);
// Check if path is within allowed directories
const isAllowed = allowedDirectories.some((dir) => normalizedRequested.startsWith(dir));
if (!isAllowed) {
throw new Error(
`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(
', ',
)}`,
);
}
// Handle symlinks by checking their real path
try {
const realPath = await fs.realpath(absolute);
const normalizedReal = normalizePath(realPath);
const isRealPathAllowed = allowedDirectories.some((dir) => normalizedReal.startsWith(dir));
if (!isRealPathAllowed) {
throw new Error('Access denied - symlink target outside allowed directories');
}
return realPath;
} catch (error) {
// For new files that don't exist yet, verify parent directory
const parentDir = path.dirname(absolute);
try {
const realParentPath = await fs.realpath(parentDir);
const normalizedParent = normalizePath(realParentPath);
const isParentAllowed = allowedDirectories.some((dir) => normalizedParent.startsWith(dir));
if (!isParentAllowed) {
throw new Error('Access denied - parent directory outside allowed directories');
}
return absolute;
} catch {
throw new Error(`Parent directory does not exist: ${parentDir}`);
}
}
}
// Schema definitions
const ReadFileArgsSchema = z.object({
path: z.string(),
});
const ReadMultipleFilesArgsSchema = z.object({
paths: z.array(z.string()),
});
const WriteFileArgsSchema = z.object({
path: z.string(),
content: z.string(),
});
const EditOperation = z.object({
oldText: z.string().describe('Text to search for - must match exactly'),
newText: z.string().describe('Text to replace with'),
});
const EditFileArgsSchema = z.object({
path: z.string(),
edits: z.array(EditOperation),
dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format'),
});
const CreateDirectoryArgsSchema = z.object({
path: z.string(),
});
const ListDirectoryArgsSchema = z.object({
path: z.string(),
});
const MoveFileArgsSchema = z.object({
source: z.string(),
destination: z.string(),
});
const SearchFilesArgsSchema = z.object({
path: z.string(),
pattern: z.string(),
excludePatterns: z.array(z.string()).optional().default([]),
});
const GetFileInfoArgsSchema = z.object({
path: z.string(),
});
const ToolInputSchema = ToolSchema.shape.inputSchema;
type ToolInput = z.infer<typeof ToolInputSchema>;
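// ToolInput is the MCP SDK's JSON Schema type for a tool's inputSchema; the
// zod schemas above are converted to it via zodToJsonSchema when tools are listed.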
interface FileInfo {
size: number;
created: Date;
modified: Date;
accessed: Date;
isDirectory: boolean;
isFile: boolean;
permissions: string;
}
// Server setup
const server = new Server(
{
name: 'secure-filesystem-server',
version: '0.2.0',
},
{
capabilities: {
tools: {},
},
},
);
// Tool implementations
async function getFileStats(filePath: string): Promise<FileInfo> {
const stats = await fs.stat(filePath);
return {
size: stats.size,
created: stats.birthtime,
modified: stats.mtime,
accessed: stats.atime,
isDirectory: stats.isDirectory(),
isFile: stats.isFile(),
permissions: stats.mode.toString(8).slice(-3),
};
}
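// Recursively walks rootPath, skipping entries that fail path validation or
// match an exclude glob, and collects full paths whose names contain the
// search pattern (case-insensitive).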
async function searchFiles(
rootPath: string,
pattern: string,
excludePatterns: string[] = [],
): Promise<string[]> {
const results: string[] = [];
async function search(currentPath: string) {
const entries = await fs.readdir(currentPath, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(currentPath, entry.name);
try {
// Validate each path before processing
await validatePath(fullPath);
// Check if path matches any exclude pattern
const relativePath = path.relative(rootPath, fullPath);
const shouldExclude = excludePatterns.some((excludePattern) => {
const globPattern = excludePattern.includes('*') ? excludePattern : `**/${excludePattern}/**`;
return minimatch(relativePath, globPattern, { dot: true });
});
if (shouldExclude) {
continue;
}
if (entry.name.toLowerCase().includes(pattern.toLowerCase())) {
results.push(fullPath);
}
if (entry.isDirectory()) {
await search(fullPath);
}
} catch (error) {
// Skip invalid paths during search
continue;
}
}
}
await search(rootPath);
return results;
}
// file editing and diffing utilities
function normalizeLineEndings(text: string): string {
return text.replace(/\r\n/g, '\n');
}
function createUnifiedDiff(originalContent: string, newContent: string, filepath = 'file'): string {
// Ensure consistent line endings for diff
const normalizedOriginal = normalizeLineEndings(originalContent);
const normalizedNew = normalizeLineEndings(newContent);
return createTwoFilesPatch(
filepath,
filepath,
normalizedOriginal,
normalizedNew,
'original',
'modified',
);
}
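// Applies a sequence of { oldText, newText } edits. Exact substring matches are
// preferred; otherwise a line-by-line, whitespace-tolerant match is attempted
// that preserves the original indentation. Returns a fenced unified diff and
// only writes the file when dryRun is false.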
async function applyFileEdits(
filePath: string,
edits: Array<{ oldText: string; newText: string }>,
dryRun = false,
): Promise<string> {
// Read file content and normalize line endings
const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'));
// Apply edits sequentially
let modifiedContent = content;
for (const edit of edits) {
const normalizedOld = normalizeLineEndings(edit.oldText);
const normalizedNew = normalizeLineEndings(edit.newText);
// If exact match exists, use it
if (modifiedContent.includes(normalizedOld)) {
modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
continue;
}
// Otherwise, try line-by-line matching with flexibility for whitespace
const oldLines = normalizedOld.split('\n');
const contentLines = modifiedContent.split('\n');
let matchFound = false;
for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
const potentialMatch = contentLines.slice(i, i + oldLines.length);
// Compare lines with normalized whitespace
const isMatch = oldLines.every((oldLine, j) => {
const contentLine = potentialMatch[j];
return oldLine.trim() === contentLine.trim();
});
if (isMatch) {
// Preserve original indentation of first line
const originalIndent = contentLines[i].match(/^\s*/)?.[0] || '';
const newLines = normalizedNew.split('\n').map((line, j) => {
if (j === 0) {
return originalIndent + line.trimStart();
}
// For subsequent lines, try to preserve relative indentation
const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || '';
const newIndent = line.match(/^\s*/)?.[0] || '';
if (oldIndent && newIndent) {
const relativeIndent = newIndent.length - oldIndent.length;
return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart();
}
return line;
});
contentLines.splice(i, oldLines.length, ...newLines);
modifiedContent = contentLines.join('\n');
matchFound = true;
break;
}
}
if (!matchFound) {
throw new Error(`Could not find a match for edit:\n${edit.oldText}`);
}
}
// Create unified diff
const diff = createUnifiedDiff(content, modifiedContent, filePath);
// Format diff with appropriate number of backticks
let numBackticks = 3;
while (diff.includes('`'.repeat(numBackticks))) {
numBackticks++;
}
const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`;
if (!dryRun) {
await fs.writeFile(filePath, modifiedContent, 'utf-8');
}
return formattedDiff;
}
// Tool handlers
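// List-tools handler: advertises each tool with a JSON Schema generated from its zod schema.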
server.setRequestHandler(ListToolsRequestSchema, async () => {
return {
tools: [
{
name: 'read_file',
description:
'Read the complete contents of a file from the file system. ' +
'Handles various text encodings and provides detailed error messages ' +
'if the file cannot be read. Use this tool when you need to examine ' +
'the contents of a single file. Only works within allowed directories.',
inputSchema: zodToJsonSchema(ReadFileArgsSchema) as ToolInput,
},
{
name: 'read_multiple_files',
description:
'Read the contents of multiple files simultaneously. This is more ' +
'efficient than reading files one by one when you need to analyze ' +
'or compare multiple files. Each file\'s content is returned with its ' +
'path as a reference. Failed reads for individual files won\'t stop ' +
'the entire operation. Only works within allowed directories.',
inputSchema: zodToJsonSchema(ReadMultipleFilesArgsSchema) as ToolInput,
},
{
name: 'write_file',
description:
'Create a new file or completely overwrite an existing file with new content. ' +
'Use with caution as it will overwrite existing files without warning. ' +
'Handles text content with proper encoding. Only works within allowed directories.',
inputSchema: zodToJsonSchema(WriteFileArgsSchema) as ToolInput,
},
{
name: 'edit_file',
description:
'Make line-based edits to a text file. Each edit replaces exact line sequences ' +
'with new content. Returns a git-style diff showing the changes made. ' +
'Only works within allowed directories.',
inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput,
},
{
name: 'create_directory',
description:
'Create a new directory or ensure a directory exists. Can create multiple ' +
'nested directories in one operation. If the directory already exists, ' +
'this operation will succeed silently. Perfect for setting up directory ' +
'structures for projects or ensuring required paths exist. Only works within allowed directories.',
inputSchema: zodToJsonSchema(CreateDirectoryArgsSchema) as ToolInput,
},
{
name: 'list_directory',
description:
'Get a detailed listing of all files and directories in a specified path. ' +
'Results clearly distinguish between files and directories with [FILE] and [DIR] ' +
'prefixes. This tool is essential for understanding directory structure and ' +
'finding specific files within a directory. Only works within allowed directories.',
inputSchema: zodToJsonSchema(ListDirectoryArgsSchema) as ToolInput,
},
{
name: 'move_file',
description:
'Move or rename files and directories. Can move files between directories ' +
'and rename them in a single operation. If the destination exists, the ' +
'operation will fail. Works across different directories and can be used ' +
'for simple renaming within the same directory. Both source and destination must be within allowed directories.',
inputSchema: zodToJsonSchema(MoveFileArgsSchema) as ToolInput,
},
{
name: 'search_files',
description:
'Recursively search for files and directories matching a pattern. ' +
'Searches through all subdirectories from the starting path. The search ' +
'is case-insensitive and matches partial names. Returns full paths to all ' +
'matching items. Great for finding files when you don\'t know their exact location. ' +
'Only searches within allowed directories.',
inputSchema: zodToJsonSchema(SearchFilesArgsSchema) as ToolInput,
},
{
name: 'get_file_info',
description:
'Retrieve detailed metadata about a file or directory. Returns comprehensive ' +
'information including size, creation time, last modified time, permissions, ' +
'and type. This tool is perfect for understanding file characteristics ' +
'without reading the actual content. Only works within allowed directories.',
inputSchema: zodToJsonSchema(GetFileInfoArgsSchema) as ToolInput,
},
{
name: 'list_allowed_directories',
description:
'Returns the list of directories that this server is allowed to access. ' +
'Use this to understand which directories are available before trying to access files.',
inputSchema: {
type: 'object',
properties: {},
required: [],
},
},
],
};
});
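// Call-tool handler: validates the arguments with the matching zod schema,
// re-validates every path against the allow-list, and returns results (or
// errors) as text content.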
server.setRequestHandler(CallToolRequestSchema, async (request) => {
try {
const { name, arguments: args } = request.params;
switch (name) {
case 'read_file': {
const parsed = ReadFileArgsSchema.safeParse(args);
if (!parsed.success) {
throw new Error(`Invalid arguments for read_file: ${parsed.error}`);
}
const validPath = await validatePath(parsed.data.path);
const content = await fs.readFile(validPath, 'utf-8');
return {
content: [{ type: 'text', text: content }],
};
}
case 'read_multiple_files': {
const parsed = ReadMultipleFilesArgsSchema.safeParse(args);
if (!parsed.success) {
throw new Error(`Invalid arguments for read_multiple_files: ${parsed.error}`);
}
const results = await Promise.all(
parsed.data.paths.map(async (filePath: string) => {
try {
const validPath = await validatePath(filePath);
const content = await fs.readFile(validPath, 'utf-8');
return `${filePath}:\n${content}\n`;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
return `${filePath}: Error - ${errorMessage}`;
}
}),
);
return {
content: [{ type: 'text', text: results.join('\n---\n') }],
};
}
case 'write_file': {
const parsed = WriteFileArgsSchema.safeParse(args);
if (!parsed.success) {
throw new Error(`Invalid arguments for write_file: ${parsed.error}`);
}
const validPath = await validatePath(parsed.data.path);
await fs.writeFile(validPath, parsed.data.content, 'utf-8');
return {
content: [{ type: 'text', text: `Successfully wrote to ${parsed.data.path}` }],
};
}
case 'edit_file': {
const parsed = EditFileArgsSchema.safeParse(args);
if (!parsed.success) {
throw new Error(`Invalid arguments for edit_file: ${parsed.error}`);
}
const validPath = await validatePath(parsed.data.path);
const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun);
return {
content: [{ type: 'text', text: result }],
};
}
case 'create_directory': {
const parsed = CreateDirectoryArgsSchema.safeParse(args);
if (!parsed.success) {
throw new Error(`Invalid arguments for create_directory: ${parsed.error}`);
}
const validPath = await validatePath(parsed.data.path);
await fs.mkdir(validPath, { recursive: true });
return {
content: [{ type: 'text', text: `Successfully created directory ${parsed.data.path}` }],
};
}
case 'list_directory': {
const parsed = ListDirectoryArgsSchema.safeParse(args);
if (!parsed.success) {
throw new Error(`Invalid arguments for list_directory: ${parsed.error}`);
}
const validPath = await validatePath(parsed.data.path);
const entries = await fs.readdir(validPath, { withFileTypes: true });
const formatted = entries
.map((entry) => `${entry.isDirectory() ? '[DIR]' : '[FILE]'} ${entry.name}`)
.join('\n');
return {
content: [{ type: 'text', text: formatted }],
};
}
case 'move_file': {
const parsed = MoveFileArgsSchema.safeParse(args);
if (!parsed.success) {
throw new Error(`Invalid arguments for move_file: ${parsed.error}`);
}
const validSourcePath = await validatePath(parsed.data.source);
const validDestPath = await validatePath(parsed.data.destination);
await fs.rename(validSourcePath, validDestPath);
return {
content: [
{
type: 'text',
text: `Successfully moved ${parsed.data.source} to ${parsed.data.destination}`,
},
],
};
}
case 'search_files': {
const parsed = SearchFilesArgsSchema.safeParse(args);
if (!parsed.success) {
throw new Error(`Invalid arguments for search_files: ${parsed.error}`);
}
const validPath = await validatePath(parsed.data.path);
const results = await searchFiles(
validPath,
parsed.data.pattern,
parsed.data.excludePatterns,
);
return {
content: [
{ type: 'text', text: results.length > 0 ? results.join('\n') : 'No matches found' },
],
};
}
case 'get_file_info': {
const parsed = GetFileInfoArgsSchema.safeParse(args);
if (!parsed.success) {
throw new Error(`Invalid arguments for get_file_info: ${parsed.error}`);
}
const validPath = await validatePath(parsed.data.path);
const info = await getFileStats(validPath);
return {
content: [
{
type: 'text',
text: Object.entries(info)
.map(([key, value]) => `${key}: ${value}`)
.join('\n'),
},
],
};
}
case 'list_allowed_directories': {
return {
content: [
{
type: 'text',
text: `Allowed directories:\n${allowedDirectories.join('\n')}`,
},
],
};
}
default:
throw new Error(`Unknown tool: ${name}`);
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
return {
content: [{ type: 'text', text: `Error: ${errorMessage}` }],
isError: true,
};
}
});
// Start server
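// runServer starts the MCP server on the requested transport: plain stdio, or
// an Express app that exposes a GET /sse event stream and a POST /message
// endpoint for incoming JSON-RPC messages.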
async function runServer(transport: 'stdio' | 'sse', port?: number) {
if (transport === 'stdio') {
const stdioTransport = new StdioServerTransport();
await server.connect(stdioTransport);
console.error('Secure MCP Filesystem Server running on stdio');
console.error('Allowed directories:', allowedDirectories);
} else {
const app = express();
app.use(express.json());
// Set up CORS
app.use((req, res, next) => {
res.header('Access-Control-Allow-Origin', '*');
res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
res.header('Access-Control-Allow-Headers', 'Content-Type');
if (req.method === 'OPTIONS') {
return res.sendStatus(200);
}
next();
});
let sseTransport: SSEServerTransport;
// SSE endpoint
app.get('/sse', async (req, res) => {
console.log('New SSE connection');
sseTransport = new SSEServerTransport('/message', res);
await server.connect(sseTransport);
// Cleanup on close
res.on('close', async () => {
console.log('SSE connection closed');
await server.close();
});
});
// Message endpoint
app.post('/message', async (req, res) => {
if (!sseTransport) {
return res.status(503).send('SSE connection not established');
}
await sseTransport.handlePostMessage(req, res);
});
const serverPort = port || 3001;
app.listen(serverPort, () => {
console.log(
`Secure MCP Filesystem Server running on SSE at http://localhost:${serverPort}/sse`,
);
console.log('Allowed directories:', allowedDirectories);
});
}
}
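// With --transport=sse, an MCP client opens the event stream at
// http://localhost:<port>/sse and posts JSON-RPC messages to
// http://localhost:<port>/message (illustrative URLs; the default port is 3001).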
// Start the server with the specified transport
runServer(transport, port).catch((error) => {
console.error('Fatal error running server:', error);
process.exit(1);
});