mirror of
https://github.com/modelcontextprotocol/servers.git
synced 2025-04-13 23:15:37 +03:00
Reorganize files and add some tests
This commit is contained in:
21
jest.config.js
Normal file
21
jest.config.js
Normal file
@@ -0,0 +1,21 @@
|
||||
/** @type {import('ts-jest').JestConfigWithTsJest} */
|
||||
export default {
|
||||
preset: 'ts-jest/presets/default-esm',
|
||||
testEnvironment: 'node',
|
||||
extensionsToTreatAsEsm: ['.ts'],
|
||||
moduleNameMapper: {
|
||||
'^(\\.{1,2}/.*)\\.js$': '$1',
|
||||
},
|
||||
transform: {
|
||||
'^.+\\.tsx?$': ['ts-jest', { useESM: true }],
|
||||
},
|
||||
transformIgnorePatterns: [
|
||||
'node_modules/(?!(@modelcontextprotocol)/)'
|
||||
],
|
||||
testMatch: ['**/src/**/__tests__/**/*.test.ts'],
|
||||
collectCoverageFrom: [
|
||||
'**/src/**/*.ts',
|
||||
'!**/src/**/__tests__/**',
|
||||
'!**/dist/**',
|
||||
],
|
||||
};
|
||||
3600
package-lock.json
generated
3600
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -16,7 +16,14 @@
|
||||
"build": "npm run build --workspaces",
|
||||
"watch": "npm run watch --workspaces",
|
||||
"publish-all": "npm publish --workspaces --access public",
|
||||
"link-all": "npm link --workspaces"
|
||||
"link-all": "npm link --workspaces",
|
||||
"test": "node --experimental-vm-modules node_modules/jest/bin/jest.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"jest": "^29.7.0",
|
||||
"ts-jest": "^29.1.2",
|
||||
"@types/jest": "^29.5.12",
|
||||
"@jest/globals": "^29.7.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/server-everything": "*",
|
||||
|
||||
296
src/filesystem/__tests__/read-file.test.ts
Normal file
296
src/filesystem/__tests__/read-file.test.ts
Normal file
@@ -0,0 +1,296 @@
|
||||
import { jest } from '@jest/globals';
|
||||
import { z } from "zod";
|
||||
import path from 'path';
|
||||
import type { PathLike } from 'fs';
|
||||
import { FilesystemServer } from '../server.js';
|
||||
import { FileSystemDependencies } from '../types.js';
|
||||
import { CallToolRequestSchema, CallToolResultSchema } from "@modelcontextprotocol/sdk/types.js";
|
||||
|
||||
// Test helper class to access protected methods
|
||||
class TestFilesystemServer extends FilesystemServer {
|
||||
async callTool(request: z.infer<typeof CallToolRequestSchema>) {
|
||||
return this.handleCallTool(request);
|
||||
}
|
||||
}
|
||||
|
||||
type CallToolRequest = z.infer<typeof CallToolRequestSchema>;
|
||||
type CallToolResult = z.infer<typeof CallToolResultSchema>;
|
||||
|
||||
// Test helper functions
|
||||
const createMockFs = () => ({
|
||||
readFile: jest.fn(),
|
||||
stat: jest.fn(),
|
||||
realpath: jest.fn(),
|
||||
mkdir: jest.fn(),
|
||||
writeFile: jest.fn(),
|
||||
readdir: jest.fn(),
|
||||
rename: jest.fn(),
|
||||
});
|
||||
|
||||
const createMockPath = () => ({
|
||||
...path,
|
||||
normalize: jest.fn(),
|
||||
isAbsolute: jest.fn(),
|
||||
resolve: jest.fn(),
|
||||
dirname: jest.fn(),
|
||||
join: jest.fn(),
|
||||
});
|
||||
|
||||
const createMockOs = () => ({
|
||||
homedir: jest.fn(),
|
||||
});
|
||||
|
||||
const createTestServer = (deps: FileSystemDependencies) => {
|
||||
const server = new TestFilesystemServer(['/allowed/dir'], deps);
|
||||
server.setupHandlers();
|
||||
return server;
|
||||
};
|
||||
|
||||
const setupBasicMocks = (
|
||||
mockPath: jest.Mocked<typeof path>,
|
||||
mockFs: jest.Mocked<typeof import('fs/promises')>,
|
||||
filePath: string,
|
||||
content: string
|
||||
) => {
|
||||
mockPath.normalize.mockImplementation((p: string) => p);
|
||||
mockPath.isAbsolute.mockReturnValue(true);
|
||||
mockPath.resolve.mockReturnValue(filePath);
|
||||
mockFs.realpath.mockResolvedValue(filePath);
|
||||
mockFs.readFile.mockResolvedValue(content);
|
||||
};
|
||||
|
||||
const setupErrorMocks = (
|
||||
mockPath: jest.Mocked<typeof path>,
|
||||
mockFs: jest.Mocked<typeof import('fs/promises')>,
|
||||
filePath: string,
|
||||
error: Error
|
||||
) => {
|
||||
mockPath.normalize.mockImplementation((p: string) => p);
|
||||
mockPath.isAbsolute.mockReturnValue(true);
|
||||
mockPath.resolve.mockReturnValue(filePath);
|
||||
mockFs.realpath.mockRejectedValue(error);
|
||||
};
|
||||
|
||||
describe('read_file', () => {
|
||||
let mockFs: jest.Mocked<typeof import('fs/promises')>;
|
||||
let mockPath: jest.Mocked<typeof import('path')>;
|
||||
let mockOs: jest.Mocked<typeof import('os')>;
|
||||
let server: TestFilesystemServer;
|
||||
let deps: FileSystemDependencies;
|
||||
|
||||
beforeAll(() => {
|
||||
// Create initial mocks and import jest
|
||||
mockFs = createMockFs() as any;
|
||||
mockPath = createMockPath() as any;
|
||||
mockOs = createMockOs() as any;
|
||||
deps = { fs: mockFs, path: mockPath, os: mockOs };
|
||||
|
||||
// Set up default mock implementations
|
||||
mockPath.join.mockImplementation((...paths: string[]) => paths.join('/'));
|
||||
mockPath.dirname.mockImplementation((p: string) => p.split('/').slice(0, -1).join('/'));
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset all mocks before each test
|
||||
jest.clearAllMocks();
|
||||
server = createTestServer(deps);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Clean up after each test
|
||||
server = null as unknown as TestFilesystemServer;
|
||||
});
|
||||
|
||||
describe('basic functionality', () => {
|
||||
it('should read a file successfully within allowed directory', async () => {
|
||||
const testPath = '/allowed/dir/test.txt';
|
||||
setupBasicMocks(mockPath, mockFs, testPath, 'file content');
|
||||
|
||||
const result = await server.callTool({
|
||||
method: 'tools/call',
|
||||
params: {
|
||||
name: 'read_file',
|
||||
arguments: {
|
||||
path: '/allowed/dir/test.txt'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
content: [{ type: 'text', text: 'file content' }]
|
||||
});
|
||||
expect(mockFs.readFile).toHaveBeenCalledWith('/allowed/dir/test.txt', 'utf-8');
|
||||
});
|
||||
|
||||
it('should handle empty files', async () => {
|
||||
const testPath = '/allowed/dir/empty.txt';
|
||||
setupBasicMocks(mockPath, mockFs, testPath, '');
|
||||
|
||||
const result = await server.callTool({
|
||||
method: 'tools/call',
|
||||
params: {
|
||||
name: 'read_file',
|
||||
arguments: {
|
||||
path: '/allowed/dir/empty.txt'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
content: [{ type: 'text', text: '' }]
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('path validation', () => {
|
||||
it('should handle relative paths', async () => {
|
||||
const testPath = '/allowed/dir/subfolder/test.txt';
|
||||
mockPath.normalize.mockImplementation(p => p);
|
||||
mockPath.isAbsolute.mockReturnValue(false);
|
||||
mockPath.resolve.mockReturnValue(testPath);
|
||||
mockFs.realpath.mockResolvedValue(testPath);
|
||||
mockFs.readFile.mockResolvedValue('relative path content');
|
||||
|
||||
const result = await server.callTool({
|
||||
method: 'tools/call',
|
||||
params: {
|
||||
name: 'read_file',
|
||||
arguments: {
|
||||
path: 'subfolder/test.txt'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
content: [{ type: 'text', text: 'relative path content' }]
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle home directory expansion', async () => {
|
||||
const testPath = '/allowed/dir/test.txt';
|
||||
mockOs.homedir.mockReturnValue('/home/user');
|
||||
setupBasicMocks(mockPath, mockFs, testPath, 'home dir content');
|
||||
|
||||
const result = await server.callTool({
|
||||
method: 'tools/call',
|
||||
params: {
|
||||
name: 'read_file',
|
||||
arguments: {
|
||||
path: '~/test.txt'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
content: [{ type: 'text', text: 'home dir content' }]
|
||||
});
|
||||
expect(mockOs.homedir).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('security', () => {
|
||||
it('should reject paths outside allowed directories', async () => {
|
||||
const testPath = '/not/allowed/test.txt';
|
||||
setupBasicMocks(mockPath, mockFs, testPath, '');
|
||||
|
||||
const result = await server.callTool({
|
||||
method: 'tools/call',
|
||||
params: {
|
||||
name: 'read_file',
|
||||
arguments: {
|
||||
path: '/not/allowed/test.txt'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(result.isError).toBe(true);
|
||||
expect(result.content[0].text).toContain('Access denied');
|
||||
});
|
||||
|
||||
it('should handle symlinks that point outside allowed directories', async () => {
|
||||
const sourcePath = '/allowed/dir/link.txt';
|
||||
const targetPath = '/not/allowed/target.txt';
|
||||
mockPath.normalize.mockImplementation(p => p);
|
||||
mockPath.isAbsolute.mockReturnValue(true);
|
||||
mockPath.resolve.mockReturnValue(sourcePath);
|
||||
mockFs.realpath.mockResolvedValue(targetPath);
|
||||
|
||||
const result = await server.callTool({
|
||||
method: 'tools/call',
|
||||
params: {
|
||||
name: 'read_file',
|
||||
arguments: {
|
||||
path: '/allowed/dir/link.txt'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(result.isError).toBe(true);
|
||||
expect(result.content[0].text).toContain('Access denied');
|
||||
});
|
||||
|
||||
it('should handle non-existent files', async () => {
|
||||
const testPath = '/allowed/dir/nonexistent.txt';
|
||||
mockPath.normalize.mockImplementation(p => p);
|
||||
mockPath.isAbsolute.mockReturnValue(true);
|
||||
mockPath.resolve.mockReturnValue(testPath);
|
||||
mockFs.realpath.mockRejectedValue(new Error('ENOENT'));
|
||||
mockFs.readFile.mockRejectedValue(new Error('ENOENT'));
|
||||
|
||||
const result = await server.callTool({
|
||||
method: 'tools/call',
|
||||
params: {
|
||||
name: 'read_file',
|
||||
arguments: {
|
||||
path: '/allowed/dir/nonexistent.txt'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(result.isError).toBe(true);
|
||||
expect(result.content[0].text).toContain('ENOENT');
|
||||
});
|
||||
});
|
||||
|
||||
describe('cross-platform', () => {
|
||||
it('should handle Windows-style paths', async () => {
|
||||
const testPath = '/allowed/dir/test.txt';
|
||||
mockPath.normalize.mockImplementation(p => p.replace(/\\/g, '/'));
|
||||
setupBasicMocks(mockPath, mockFs, testPath, 'windows path content');
|
||||
|
||||
const result = await server.callTool({
|
||||
method: 'tools/call',
|
||||
params: {
|
||||
name: 'read_file',
|
||||
arguments: {
|
||||
path: '\\allowed\\dir\\test.txt'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
content: [{ type: 'text', text: 'windows path content' }]
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle mixed path separators', async () => {
|
||||
const testPath = '/allowed/dir/test.txt';
|
||||
mockPath.normalize.mockImplementation(p => p.replace(/\\/g, '/'));
|
||||
setupBasicMocks(mockPath, mockFs, testPath, 'mixed path content');
|
||||
|
||||
const result = await server.callTool({
|
||||
method: 'tools/call',
|
||||
params: {
|
||||
name: 'read_file',
|
||||
arguments: {
|
||||
path: '/allowed\\dir/test.txt'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
content: [{ type: 'text', text: 'mixed path content' }]
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
110
src/filesystem/file-editor.ts
Normal file
110
src/filesystem/file-editor.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import { FileSystemDependencies } from './types.js';
|
||||
import { PathUtils } from './path-utils.js';
|
||||
import { createTwoFilesPatch } from 'diff';
|
||||
|
||||
export class FileEditor {
|
||||
constructor(
|
||||
private deps: FileSystemDependencies,
|
||||
private pathUtils: PathUtils,
|
||||
private allowedDirectories: string[]
|
||||
) {}
|
||||
|
||||
normalizeLineEndings(text: string): string {
|
||||
return text.replace(/\r\n/g, '\n');
|
||||
}
|
||||
|
||||
createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
|
||||
// Ensure consistent line endings for diff
|
||||
const normalizedOriginal = this.normalizeLineEndings(originalContent);
|
||||
const normalizedNew = this.normalizeLineEndings(newContent);
|
||||
|
||||
return createTwoFilesPatch(
|
||||
filepath,
|
||||
filepath,
|
||||
normalizedOriginal,
|
||||
normalizedNew,
|
||||
'original',
|
||||
'modified'
|
||||
);
|
||||
}
|
||||
|
||||
async applyFileEdits(
|
||||
filePath: string,
|
||||
edits: Array<{oldText: string, newText: string}>,
|
||||
dryRun = false
|
||||
): Promise<string> {
|
||||
const validPath = await this.pathUtils.validatePath(filePath, this.allowedDirectories);
|
||||
|
||||
// Read file content and normalize line endings
|
||||
const content = this.normalizeLineEndings(await this.deps.fs.readFile(validPath, 'utf-8'));
|
||||
|
||||
// Apply edits sequentially
|
||||
let modifiedContent = content;
|
||||
for (const edit of edits) {
|
||||
const normalizedOld = this.normalizeLineEndings(edit.oldText);
|
||||
const normalizedNew = this.normalizeLineEndings(edit.newText);
|
||||
|
||||
// If exact match exists, use it
|
||||
if (modifiedContent.includes(normalizedOld)) {
|
||||
modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Otherwise, try line-by-line matching with flexibility for whitespace
|
||||
const oldLines = normalizedOld.split('\n');
|
||||
const contentLines = modifiedContent.split('\n');
|
||||
let matchFound = false;
|
||||
|
||||
for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
|
||||
const potentialMatch = contentLines.slice(i, i + oldLines.length);
|
||||
|
||||
// Compare lines with normalized whitespace
|
||||
const isMatch = oldLines.every((oldLine, j) => {
|
||||
const contentLine = potentialMatch[j];
|
||||
return oldLine.trim() === contentLine.trim();
|
||||
});
|
||||
|
||||
if (isMatch) {
|
||||
// Preserve original indentation of first line
|
||||
const originalIndent = contentLines[i].match(/^\s*/)?.[0] || '';
|
||||
const newLines = normalizedNew.split('\n').map((line, j) => {
|
||||
if (j === 0) return originalIndent + line.trimStart();
|
||||
// For subsequent lines, try to preserve relative indentation
|
||||
const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || '';
|
||||
const newIndent = line.match(/^\s*/)?.[0] || '';
|
||||
if (oldIndent && newIndent) {
|
||||
const relativeIndent = newIndent.length - oldIndent.length;
|
||||
return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart();
|
||||
}
|
||||
return line;
|
||||
});
|
||||
|
||||
contentLines.splice(i, oldLines.length, ...newLines);
|
||||
modifiedContent = contentLines.join('\n');
|
||||
matchFound = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!matchFound) {
|
||||
throw new Error(`Could not find exact match for edit:\n${edit.oldText}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Create unified diff
|
||||
const diff = this.createUnifiedDiff(content, modifiedContent, filePath);
|
||||
|
||||
// Format diff with appropriate number of backticks
|
||||
let numBackticks = 3;
|
||||
while (diff.includes('`'.repeat(numBackticks))) {
|
||||
numBackticks++;
|
||||
}
|
||||
const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`;
|
||||
|
||||
if (!dryRun) {
|
||||
await this.deps.fs.writeFile(validPath, modifiedContent, 'utf-8');
|
||||
}
|
||||
|
||||
return formattedDiff;
|
||||
}
|
||||
}
|
||||
73
src/filesystem/file-operations.ts
Normal file
73
src/filesystem/file-operations.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
import { FileSystemDependencies, FileInfo } from './types.js';
|
||||
import { PathUtils } from './path-utils.js';
|
||||
import { minimatch } from 'minimatch';
|
||||
import path from 'path';
|
||||
|
||||
export class FileOperations {
|
||||
constructor(
|
||||
private deps: FileSystemDependencies,
|
||||
private pathUtils: PathUtils,
|
||||
private allowedDirectories: string[]
|
||||
) {}
|
||||
|
||||
async getFileStats(filePath: string): Promise<FileInfo> {
|
||||
const validPath = await this.pathUtils.validatePath(filePath, this.allowedDirectories);
|
||||
const stats = await this.deps.fs.stat(validPath);
|
||||
return {
|
||||
size: stats.size,
|
||||
created: stats.birthtime,
|
||||
modified: stats.mtime,
|
||||
accessed: stats.atime,
|
||||
isDirectory: stats.isDirectory(),
|
||||
isFile: stats.isFile(),
|
||||
permissions: stats.mode.toString(8).slice(-3),
|
||||
};
|
||||
}
|
||||
|
||||
async searchFiles(
|
||||
rootPath: string,
|
||||
pattern: string,
|
||||
excludePatterns: string[] = []
|
||||
): Promise<string[]> {
|
||||
const results: string[] = [];
|
||||
const validRootPath = await this.pathUtils.validatePath(rootPath, this.allowedDirectories);
|
||||
|
||||
const search = async (currentPath: string): Promise<void> => {
|
||||
const entries = await this.deps.fs.readdir(currentPath, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = this.deps.path.join(currentPath, entry.name);
|
||||
|
||||
try {
|
||||
// Validate each path before processing
|
||||
await this.pathUtils.validatePath(fullPath, this.allowedDirectories);
|
||||
|
||||
// Check if path matches any exclude pattern
|
||||
const relativePath = this.deps.path.relative(rootPath, fullPath);
|
||||
const shouldExclude = excludePatterns.some(pattern => {
|
||||
const globPattern = pattern.includes('*') ? pattern : `**/${pattern}/**`;
|
||||
return minimatch(relativePath, globPattern, { dot: true });
|
||||
});
|
||||
|
||||
if (shouldExclude) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entry.name.toLowerCase().includes(pattern.toLowerCase())) {
|
||||
results.push(fullPath);
|
||||
}
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await search(fullPath);
|
||||
}
|
||||
} catch (error) {
|
||||
// Skip invalid paths during search
|
||||
continue;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
await search(validRootPath);
|
||||
return results;
|
||||
}
|
||||
}
|
||||
@@ -1,19 +1,9 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
ToolSchema,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
import os from 'os';
|
||||
import { z } from "zod";
|
||||
import { zodToJsonSchema } from "zod-to-json-schema";
|
||||
import { diffLines, createTwoFilesPatch } from 'diff';
|
||||
import { minimatch } from 'minimatch';
|
||||
import { FilesystemServer } from './server.js';
|
||||
|
||||
// Command line argument parsing
|
||||
const args = process.argv.slice(2);
|
||||
@@ -22,626 +12,13 @@ if (args.length === 0) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Normalize all paths consistently
|
||||
function normalizePath(p: string): string {
|
||||
return path.normalize(p);
|
||||
}
|
||||
|
||||
function expandHome(filepath: string): string {
|
||||
if (filepath.startsWith('~/') || filepath === '~') {
|
||||
return path.join(os.homedir(), filepath.slice(1));
|
||||
}
|
||||
return filepath;
|
||||
}
|
||||
|
||||
// Store allowed directories in normalized form
|
||||
const allowedDirectories = args.map(dir =>
|
||||
normalizePath(path.resolve(expandHome(dir)))
|
||||
);
|
||||
|
||||
// Validate that all directories exist and are accessible
|
||||
await Promise.all(args.map(async (dir) => {
|
||||
try {
|
||||
const stats = await fs.stat(expandHome(dir));
|
||||
if (!stats.isDirectory()) {
|
||||
console.error(`Error: ${dir} is not a directory`);
|
||||
process.exit(1);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error accessing directory ${dir}:`, error);
|
||||
process.exit(1);
|
||||
}
|
||||
}));
|
||||
|
||||
// Security utilities
|
||||
async function validatePath(requestedPath: string): Promise<string> {
|
||||
const expandedPath = expandHome(requestedPath);
|
||||
const absolute = path.isAbsolute(expandedPath)
|
||||
? path.resolve(expandedPath)
|
||||
: path.resolve(process.cwd(), expandedPath);
|
||||
|
||||
const normalizedRequested = normalizePath(absolute);
|
||||
|
||||
// Check if path is within allowed directories
|
||||
const isAllowed = allowedDirectories.some(dir => normalizedRequested.startsWith(dir));
|
||||
if (!isAllowed) {
|
||||
throw new Error(`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`);
|
||||
}
|
||||
|
||||
// Handle symlinks by checking their real path
|
||||
try {
|
||||
const realPath = await fs.realpath(absolute);
|
||||
const normalizedReal = normalizePath(realPath);
|
||||
const isRealPathAllowed = allowedDirectories.some(dir => normalizedReal.startsWith(dir));
|
||||
if (!isRealPathAllowed) {
|
||||
throw new Error("Access denied - symlink target outside allowed directories");
|
||||
}
|
||||
return realPath;
|
||||
} catch (error) {
|
||||
// For new files that don't exist yet, verify parent directory
|
||||
const parentDir = path.dirname(absolute);
|
||||
try {
|
||||
const realParentPath = await fs.realpath(parentDir);
|
||||
const normalizedParent = normalizePath(realParentPath);
|
||||
const isParentAllowed = allowedDirectories.some(dir => normalizedParent.startsWith(dir));
|
||||
if (!isParentAllowed) {
|
||||
throw new Error("Access denied - parent directory outside allowed directories");
|
||||
}
|
||||
return absolute;
|
||||
} catch {
|
||||
throw new Error(`Parent directory does not exist: ${parentDir}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Schema definitions
|
||||
const ReadFileArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
const server = new FilesystemServer(args, {
|
||||
fs,
|
||||
path,
|
||||
os
|
||||
});
|
||||
|
||||
const ReadMultipleFilesArgsSchema = z.object({
|
||||
paths: z.array(z.string()),
|
||||
});
|
||||
|
||||
const WriteFileArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
content: z.string(),
|
||||
});
|
||||
|
||||
const EditOperation = z.object({
|
||||
oldText: z.string().describe('Text to search for - must match exactly'),
|
||||
newText: z.string().describe('Text to replace with')
|
||||
});
|
||||
|
||||
const EditFileArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
edits: z.array(EditOperation),
|
||||
dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format')
|
||||
});
|
||||
|
||||
const CreateDirectoryArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
});
|
||||
|
||||
const ListDirectoryArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
});
|
||||
|
||||
const DirectoryTreeArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
});
|
||||
|
||||
const MoveFileArgsSchema = z.object({
|
||||
source: z.string(),
|
||||
destination: z.string(),
|
||||
});
|
||||
|
||||
const SearchFilesArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
pattern: z.string(),
|
||||
excludePatterns: z.array(z.string()).optional().default([])
|
||||
});
|
||||
|
||||
const GetFileInfoArgsSchema = z.object({
|
||||
path: z.string(),
|
||||
});
|
||||
|
||||
const ToolInputSchema = ToolSchema.shape.inputSchema;
|
||||
type ToolInput = z.infer<typeof ToolInputSchema>;
|
||||
|
||||
interface FileInfo {
|
||||
size: number;
|
||||
created: Date;
|
||||
modified: Date;
|
||||
accessed: Date;
|
||||
isDirectory: boolean;
|
||||
isFile: boolean;
|
||||
permissions: string;
|
||||
}
|
||||
|
||||
// Server setup
|
||||
const server = new Server(
|
||||
{
|
||||
name: "secure-filesystem-server",
|
||||
version: "0.2.0",
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {},
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
// Tool implementations
|
||||
async function getFileStats(filePath: string): Promise<FileInfo> {
|
||||
const stats = await fs.stat(filePath);
|
||||
return {
|
||||
size: stats.size,
|
||||
created: stats.birthtime,
|
||||
modified: stats.mtime,
|
||||
accessed: stats.atime,
|
||||
isDirectory: stats.isDirectory(),
|
||||
isFile: stats.isFile(),
|
||||
permissions: stats.mode.toString(8).slice(-3),
|
||||
};
|
||||
}
|
||||
|
||||
async function searchFiles(
|
||||
rootPath: string,
|
||||
pattern: string,
|
||||
excludePatterns: string[] = []
|
||||
): Promise<string[]> {
|
||||
const results: string[] = [];
|
||||
|
||||
async function search(currentPath: string) {
|
||||
const entries = await fs.readdir(currentPath, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(currentPath, entry.name);
|
||||
|
||||
try {
|
||||
// Validate each path before processing
|
||||
await validatePath(fullPath);
|
||||
|
||||
// Check if path matches any exclude pattern
|
||||
const relativePath = path.relative(rootPath, fullPath);
|
||||
const shouldExclude = excludePatterns.some(pattern => {
|
||||
const globPattern = pattern.includes('*') ? pattern : `**/${pattern}/**`;
|
||||
return minimatch(relativePath, globPattern, { dot: true });
|
||||
});
|
||||
|
||||
if (shouldExclude) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entry.name.toLowerCase().includes(pattern.toLowerCase())) {
|
||||
results.push(fullPath);
|
||||
}
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await search(fullPath);
|
||||
}
|
||||
} catch (error) {
|
||||
// Skip invalid paths during search
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await search(rootPath);
|
||||
return results;
|
||||
}
|
||||
|
||||
// file editing and diffing utilities
|
||||
function normalizeLineEndings(text: string): string {
|
||||
return text.replace(/\r\n/g, '\n');
|
||||
}
|
||||
|
||||
function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
|
||||
// Ensure consistent line endings for diff
|
||||
const normalizedOriginal = normalizeLineEndings(originalContent);
|
||||
const normalizedNew = normalizeLineEndings(newContent);
|
||||
|
||||
return createTwoFilesPatch(
|
||||
filepath,
|
||||
filepath,
|
||||
normalizedOriginal,
|
||||
normalizedNew,
|
||||
'original',
|
||||
'modified'
|
||||
);
|
||||
}
|
||||
|
||||
async function applyFileEdits(
|
||||
filePath: string,
|
||||
edits: Array<{oldText: string, newText: string}>,
|
||||
dryRun = false
|
||||
): Promise<string> {
|
||||
// Read file content and normalize line endings
|
||||
const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'));
|
||||
|
||||
// Apply edits sequentially
|
||||
let modifiedContent = content;
|
||||
for (const edit of edits) {
|
||||
const normalizedOld = normalizeLineEndings(edit.oldText);
|
||||
const normalizedNew = normalizeLineEndings(edit.newText);
|
||||
|
||||
// If exact match exists, use it
|
||||
if (modifiedContent.includes(normalizedOld)) {
|
||||
modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Otherwise, try line-by-line matching with flexibility for whitespace
|
||||
const oldLines = normalizedOld.split('\n');
|
||||
const contentLines = modifiedContent.split('\n');
|
||||
let matchFound = false;
|
||||
|
||||
for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
|
||||
const potentialMatch = contentLines.slice(i, i + oldLines.length);
|
||||
|
||||
// Compare lines with normalized whitespace
|
||||
const isMatch = oldLines.every((oldLine, j) => {
|
||||
const contentLine = potentialMatch[j];
|
||||
return oldLine.trim() === contentLine.trim();
|
||||
});
|
||||
|
||||
if (isMatch) {
|
||||
// Preserve original indentation of first line
|
||||
const originalIndent = contentLines[i].match(/^\s*/)?.[0] || '';
|
||||
const newLines = normalizedNew.split('\n').map((line, j) => {
|
||||
if (j === 0) return originalIndent + line.trimStart();
|
||||
// For subsequent lines, try to preserve relative indentation
|
||||
const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || '';
|
||||
const newIndent = line.match(/^\s*/)?.[0] || '';
|
||||
if (oldIndent && newIndent) {
|
||||
const relativeIndent = newIndent.length - oldIndent.length;
|
||||
return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart();
|
||||
}
|
||||
return line;
|
||||
});
|
||||
|
||||
contentLines.splice(i, oldLines.length, ...newLines);
|
||||
modifiedContent = contentLines.join('\n');
|
||||
matchFound = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!matchFound) {
|
||||
throw new Error(`Could not find exact match for edit:\n${edit.oldText}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Create unified diff
|
||||
const diff = createUnifiedDiff(content, modifiedContent, filePath);
|
||||
|
||||
// Format diff with appropriate number of backticks
|
||||
let numBackticks = 3;
|
||||
while (diff.includes('`'.repeat(numBackticks))) {
|
||||
numBackticks++;
|
||||
}
|
||||
const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`;
|
||||
|
||||
if (!dryRun) {
|
||||
await fs.writeFile(filePath, modifiedContent, 'utf-8');
|
||||
}
|
||||
|
||||
return formattedDiff;
|
||||
}
|
||||
|
||||
// Tool handlers
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
return {
|
||||
tools: [
|
||||
{
|
||||
name: "read_file",
|
||||
description:
|
||||
"Read the complete contents of a file from the file system. " +
|
||||
"Handles various text encodings and provides detailed error messages " +
|
||||
"if the file cannot be read. Use this tool when you need to examine " +
|
||||
"the contents of a single file. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(ReadFileArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "read_multiple_files",
|
||||
description:
|
||||
"Read the contents of multiple files simultaneously. This is more " +
|
||||
"efficient than reading files one by one when you need to analyze " +
|
||||
"or compare multiple files. Each file's content is returned with its " +
|
||||
"path as a reference. Failed reads for individual files won't stop " +
|
||||
"the entire operation. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(ReadMultipleFilesArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "write_file",
|
||||
description:
|
||||
"Create a new file or completely overwrite an existing file with new content. " +
|
||||
"Use with caution as it will overwrite existing files without warning. " +
|
||||
"Handles text content with proper encoding. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(WriteFileArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "edit_file",
|
||||
description:
|
||||
"Make line-based edits to a text file. Each edit replaces exact line sequences " +
|
||||
"with new content. Returns a git-style diff showing the changes made. " +
|
||||
"Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "create_directory",
|
||||
description:
|
||||
"Create a new directory or ensure a directory exists. Can create multiple " +
|
||||
"nested directories in one operation. If the directory already exists, " +
|
||||
"this operation will succeed silently. Perfect for setting up directory " +
|
||||
"structures for projects or ensuring required paths exist. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(CreateDirectoryArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "list_directory",
|
||||
description:
|
||||
"Get a detailed listing of all files and directories in a specified path. " +
|
||||
"Results clearly distinguish between files and directories with [FILE] and [DIR] " +
|
||||
"prefixes. This tool is essential for understanding directory structure and " +
|
||||
"finding specific files within a directory. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(ListDirectoryArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "directory_tree",
|
||||
description:
|
||||
"Get a recursive tree view of files and directories as a JSON structure. " +
|
||||
"Each entry includes 'name', 'type' (file/directory), and 'children' for directories. " +
|
||||
"Files have no children array, while directories always have a children array (which may be empty). " +
|
||||
"The output is formatted with 2-space indentation for readability. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(DirectoryTreeArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "move_file",
|
||||
description:
|
||||
"Move or rename files and directories. Can move files between directories " +
|
||||
"and rename them in a single operation. If the destination exists, the " +
|
||||
"operation will fail. Works across different directories and can be used " +
|
||||
"for simple renaming within the same directory. Both source and destination must be within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(MoveFileArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "search_files",
|
||||
description:
|
||||
"Recursively search for files and directories matching a pattern. " +
|
||||
"Searches through all subdirectories from the starting path. The search " +
|
||||
"is case-insensitive and matches partial names. Returns full paths to all " +
|
||||
"matching items. Great for finding files when you don't know their exact location. " +
|
||||
"Only searches within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(SearchFilesArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "get_file_info",
|
||||
description:
|
||||
"Retrieve detailed metadata about a file or directory. Returns comprehensive " +
|
||||
"information including size, creation time, last modified time, permissions, " +
|
||||
"and type. This tool is perfect for understanding file characteristics " +
|
||||
"without reading the actual content. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(GetFileInfoArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "list_allowed_directories",
|
||||
description:
|
||||
"Returns the list of directories that this server is allowed to access. " +
|
||||
"Use this to understand which directories are available before trying to access files.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
});
|
||||
|
||||
|
||||
// Tool dispatcher for the pre-refactor (single-file) server: validates each
// tool's arguments with its zod schema, enforces the allowed-directory sandbox
// via validatePath(), and returns MCP text content. Any thrown error is
// converted into an { isError: true } payload rather than crashing the server.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  try {
    const { name, arguments: args } = request.params;

    switch (name) {
      case "read_file": {
        const parsed = ReadFileArgsSchema.safeParse(args);
        if (!parsed.success) {
          throw new Error(`Invalid arguments for read_file: ${parsed.error}`);
        }
        // validatePath resolves the path and rejects anything outside the sandbox.
        const validPath = await validatePath(parsed.data.path);
        const content = await fs.readFile(validPath, "utf-8");
        return {
          content: [{ type: "text", text: content }],
        };
      }

      case "read_multiple_files": {
        const parsed = ReadMultipleFilesArgsSchema.safeParse(args);
        if (!parsed.success) {
          throw new Error(`Invalid arguments for read_multiple_files: ${parsed.error}`);
        }
        // Read all files in parallel; a failure on one file is reported inline
        // in its slot instead of aborting the whole batch.
        const results = await Promise.all(
          parsed.data.paths.map(async (filePath: string) => {
            try {
              const validPath = await validatePath(filePath);
              const content = await fs.readFile(validPath, "utf-8");
              return `${filePath}:\n${content}\n`;
            } catch (error) {
              const errorMessage = error instanceof Error ? error.message : String(error);
              return `${filePath}: Error - ${errorMessage}`;
            }
          }),
        );
        return {
          content: [{ type: "text", text: results.join("\n---\n") }],
        };
      }

      case "write_file": {
        const parsed = WriteFileArgsSchema.safeParse(args);
        if (!parsed.success) {
          throw new Error(`Invalid arguments for write_file: ${parsed.error}`);
        }
        const validPath = await validatePath(parsed.data.path);
        // Overwrites without warning by design (see the tool description).
        await fs.writeFile(validPath, parsed.data.content, "utf-8");
        return {
          content: [{ type: "text", text: `Successfully wrote to ${parsed.data.path}` }],
        };
      }

      case "edit_file": {
        const parsed = EditFileArgsSchema.safeParse(args);
        if (!parsed.success) {
          throw new Error(`Invalid arguments for edit_file: ${parsed.error}`);
        }
        const validPath = await validatePath(parsed.data.path);
        // Returns a git-style diff; with dryRun=true no file is modified.
        const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun);
        return {
          content: [{ type: "text", text: result }],
        };
      }

      case "create_directory": {
        const parsed = CreateDirectoryArgsSchema.safeParse(args);
        if (!parsed.success) {
          throw new Error(`Invalid arguments for create_directory: ${parsed.error}`);
        }
        const validPath = await validatePath(parsed.data.path);
        // recursive:true makes this succeed silently if the directory exists.
        await fs.mkdir(validPath, { recursive: true });
        return {
          content: [{ type: "text", text: `Successfully created directory ${parsed.data.path}` }],
        };
      }

      case "list_directory": {
        const parsed = ListDirectoryArgsSchema.safeParse(args);
        if (!parsed.success) {
          throw new Error(`Invalid arguments for list_directory: ${parsed.error}`);
        }
        const validPath = await validatePath(parsed.data.path);
        const entries = await fs.readdir(validPath, { withFileTypes: true });
        // One "[DIR]"/"[FILE]" prefixed line per entry.
        const formatted = entries
          .map((entry) => `${entry.isDirectory() ? "[DIR]" : "[FILE]"} ${entry.name}`)
          .join("\n");
        return {
          content: [{ type: "text", text: formatted }],
        };
      }

      case "directory_tree": {
        const parsed = DirectoryTreeArgsSchema.safeParse(args);
        if (!parsed.success) {
          throw new Error(`Invalid arguments for directory_tree: ${parsed.error}`);
        }

        interface TreeEntry {
          name: string;
          type: 'file' | 'directory';
          children?: TreeEntry[]; // present only for directories
        }

        // Recursive walk; each level re-validates its path so symlinks cannot
        // lead the traversal outside the allowed directories.
        async function buildTree(currentPath: string): Promise<TreeEntry[]> {
          const validPath = await validatePath(currentPath);
          const entries = await fs.readdir(validPath, {withFileTypes: true});
          const result: TreeEntry[] = [];

          for (const entry of entries) {
            const entryData: TreeEntry = {
              name: entry.name,
              type: entry.isDirectory() ? 'directory' : 'file'
            };

            if (entry.isDirectory()) {
              const subPath = path.join(currentPath, entry.name);
              entryData.children = await buildTree(subPath);
            }

            result.push(entryData);
          }

          return result;
        }

        const treeData = await buildTree(parsed.data.path);
        return {
          content: [{
            type: "text",
            text: JSON.stringify(treeData, null, 2)
          }],
        };
      }

      case "move_file": {
        const parsed = MoveFileArgsSchema.safeParse(args);
        if (!parsed.success) {
          throw new Error(`Invalid arguments for move_file: ${parsed.error}`);
        }
        // Both endpoints must be inside the sandbox; rename fails if the
        // destination already exists (per the tool description).
        const validSourcePath = await validatePath(parsed.data.source);
        const validDestPath = await validatePath(parsed.data.destination);
        await fs.rename(validSourcePath, validDestPath);
        return {
          content: [{ type: "text", text: `Successfully moved ${parsed.data.source} to ${parsed.data.destination}` }],
        };
      }

      case "search_files": {
        const parsed = SearchFilesArgsSchema.safeParse(args);
        if (!parsed.success) {
          throw new Error(`Invalid arguments for search_files: ${parsed.error}`);
        }
        const validPath = await validatePath(parsed.data.path);
        const results = await searchFiles(validPath, parsed.data.pattern, parsed.data.excludePatterns);
        return {
          content: [{ type: "text", text: results.length > 0 ? results.join("\n") : "No matches found" }],
        };
      }

      case "get_file_info": {
        const parsed = GetFileInfoArgsSchema.safeParse(args);
        if (!parsed.success) {
          throw new Error(`Invalid arguments for get_file_info: ${parsed.error}`);
        }
        const validPath = await validatePath(parsed.data.path);
        const info = await getFileStats(validPath);
        // Render the stats object as "key: value" lines.
        return {
          content: [{ type: "text", text: Object.entries(info)
            .map(([key, value]) => `${key}: ${value}`)
            .join("\n") }],
        };
      }

      case "list_allowed_directories": {
        return {
          content: [{
            type: "text",
            text: `Allowed directories:\n${allowedDirectories.join('\n')}`
          }],
        };
      }

      default:
        throw new Error(`Unknown tool: ${name}`);
    }
  } catch (error) {
    // Report failures as tool output so the MCP client sees them.
    const errorMessage = error instanceof Error ? error.message : String(error);
    return {
      content: [{ type: "text", text: `Error: ${errorMessage}` }],
      isError: true,
    };
  }
});
|
||||
|
||||
// Start server
// Connects the MCP server to a stdio transport. All logging goes to stderr
// because stdout is reserved for the MCP stdio protocol stream.
async function runServer() {
  const transport = new StdioServerTransport();
  await server.connect(transport);
  console.error("Secure MCP Filesystem Server running on stdio");
  console.error("Allowed directories:", allowedDirectories);
}
|
||||
|
||||
runServer().catch((error) => {
|
||||
server.start().catch((error) => {
|
||||
console.error("Fatal error running server:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
65
src/filesystem/path-utils.ts
Normal file
65
src/filesystem/path-utils.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { FileSystemDependencies } from './types.js';
|
||||
|
||||
export class PathUtils {
|
||||
constructor(private deps: FileSystemDependencies) {}
|
||||
|
||||
normalizePath(p: string): string {
|
||||
return this.deps.path.normalize(p);
|
||||
}
|
||||
|
||||
expandHome(filepath: string): string {
|
||||
if (filepath.startsWith('~/') || filepath === '~') {
|
||||
return this.deps.path.join(this.deps.os.homedir(), filepath.slice(1));
|
||||
}
|
||||
return filepath;
|
||||
}
|
||||
|
||||
async validatePath(requestedPath: string, allowedDirectories: string[]): Promise<string> {
|
||||
const expandedPath = this.expandHome(requestedPath);
|
||||
const absolute = this.deps.path.isAbsolute(expandedPath)
|
||||
? this.deps.path.resolve(expandedPath)
|
||||
: this.deps.path.resolve(process.cwd(), expandedPath);
|
||||
|
||||
const normalizedRequested = this.normalizePath(absolute);
|
||||
|
||||
// First check if requested path is within allowed directories
|
||||
const isAllowed = allowedDirectories.some(dir => normalizedRequested.startsWith(dir));
|
||||
if (!isAllowed) {
|
||||
throw new Error(`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`);
|
||||
}
|
||||
|
||||
try {
|
||||
// Try to resolve the real path (handles symlinks)
|
||||
const realPath = await this.deps.fs.realpath(absolute);
|
||||
|
||||
// If we got here, the file exists - check if its real path is allowed
|
||||
const normalizedReal = this.normalizePath(realPath);
|
||||
const isRealPathAllowed = allowedDirectories.some(dir => normalizedReal.startsWith(dir));
|
||||
if (!isRealPathAllowed) {
|
||||
throw new Error("Access denied - symlink target outside allowed directories");
|
||||
}
|
||||
|
||||
return realPath;
|
||||
} catch (error) {
|
||||
if (error instanceof Error) {
|
||||
if (error.message.includes('ENOENT')) {
|
||||
// For non-existent files, just validate the parent directory exists
|
||||
const parentDir = this.deps.path.dirname(absolute);
|
||||
try {
|
||||
await this.deps.fs.realpath(parentDir);
|
||||
return absolute;
|
||||
} catch (parentError) {
|
||||
// If parent directory doesn't exist, propagate the original ENOENT
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Re-throw access denied errors
|
||||
if (error.message.includes('Access denied')) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Re-throw any other errors
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
386
src/filesystem/server.ts
Normal file
386
src/filesystem/server.ts
Normal file
@@ -0,0 +1,386 @@
|
||||
import fs from "fs/promises";
import os from "os";
import path from "path";
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
  CallToolRequestSchema,
  ListToolsRequestSchema,
  ToolSchema,
} from "@modelcontextprotocol/sdk/types.js";
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
import { FileSystemDependencies } from './types.js';
import { PathUtils } from './path-utils.js';
import { FileOperations } from './file-operations.js';
import { FileEditor } from './file-editor.js';
import {
  ReadFileArgsSchema,
  ReadMultipleFilesArgsSchema,
  WriteFileArgsSchema,
  EditFileArgsSchema,
  CreateDirectoryArgsSchema,
  ListDirectoryArgsSchema,
  DirectoryTreeArgsSchema,
  MoveFileArgsSchema,
  SearchFilesArgsSchema,
  GetFileInfoArgsSchema,
} from './types.js';
|
||||
|
||||
const ToolInputSchema = ToolSchema.shape.inputSchema;
|
||||
type ToolInput = z.infer<typeof ToolInputSchema>;
|
||||
|
||||
export class FilesystemServer {
|
||||
private server: Server;
|
||||
private pathUtils: PathUtils;
|
||||
private fileOps: FileOperations;
|
||||
private fileEditor: FileEditor;
|
||||
|
||||
constructor(
|
||||
private allowedDirectories: string[],
|
||||
private deps: FileSystemDependencies = {
|
||||
fs: require('fs/promises'),
|
||||
path: require('path'),
|
||||
os: require('os')
|
||||
}
|
||||
) {
|
||||
this.pathUtils = new PathUtils(deps);
|
||||
this.fileOps = new FileOperations(deps, this.pathUtils, allowedDirectories);
|
||||
this.fileEditor = new FileEditor(deps, this.pathUtils, allowedDirectories);
|
||||
|
||||
this.server = new Server(
|
||||
{
|
||||
name: "secure-filesystem-server",
|
||||
version: "0.2.0",
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {},
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
async validateDirectories(): Promise<void> {
|
||||
await Promise.all(this.allowedDirectories.map(async (dir) => {
|
||||
const expandedDir = this.pathUtils.expandHome(dir);
|
||||
try {
|
||||
const stats = await this.deps.fs.stat(expandedDir);
|
||||
if (!stats.isDirectory()) {
|
||||
throw new Error(`Error: ${dir} is not a directory`);
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(`Error accessing directory ${dir}: ${error}`);
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
setupHandlers(): void {
|
||||
this.server.setRequestHandler(ListToolsRequestSchema, this.handleListTools.bind(this));
|
||||
this.server.setRequestHandler(CallToolRequestSchema, this.handleCallTool.bind(this));
|
||||
}
|
||||
|
||||
private async handleListTools() {
|
||||
return {
|
||||
tools: [
|
||||
{
|
||||
name: "read_file",
|
||||
description:
|
||||
"Read the complete contents of a file from the file system. " +
|
||||
"Handles various text encodings and provides detailed error messages " +
|
||||
"if the file cannot be read. Use this tool when you need to examine " +
|
||||
"the contents of a single file. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(ReadFileArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "read_multiple_files",
|
||||
description:
|
||||
"Read the contents of multiple files simultaneously. This is more " +
|
||||
"efficient than reading files one by one when you need to analyze " +
|
||||
"or compare multiple files. Each file's content is returned with its " +
|
||||
"path as a reference. Failed reads for individual files won't stop " +
|
||||
"the entire operation. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(ReadMultipleFilesArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "write_file",
|
||||
description:
|
||||
"Create a new file or completely overwrite an existing file with new content. " +
|
||||
"Use with caution as it will overwrite existing files without warning. " +
|
||||
"Handles text content with proper encoding. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(WriteFileArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "edit_file",
|
||||
description:
|
||||
"Make line-based edits to a text file. Each edit replaces exact line sequences " +
|
||||
"with new content. Returns a git-style diff showing the changes made. " +
|
||||
"Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "create_directory",
|
||||
description:
|
||||
"Create a new directory or ensure a directory exists. Can create multiple " +
|
||||
"nested directories in one operation. If the directory already exists, " +
|
||||
"this operation will succeed silently. Perfect for setting up directory " +
|
||||
"structures for projects or ensuring required paths exist. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(CreateDirectoryArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "list_directory",
|
||||
description:
|
||||
"Get a detailed listing of all files and directories in a specified path. " +
|
||||
"Results clearly distinguish between files and directories with [FILE] and [DIR] " +
|
||||
"prefixes. This tool is essential for understanding directory structure and " +
|
||||
"finding specific files within a directory. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(ListDirectoryArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "directory_tree",
|
||||
description:
|
||||
"Get a recursive tree view of files and directories as a JSON structure. " +
|
||||
"Each entry includes 'name', 'type' (file/directory), and 'children' for directories. " +
|
||||
"Files have no children array, while directories always have a children array (which may be empty). " +
|
||||
"The output is formatted with 2-space indentation for readability. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(DirectoryTreeArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "move_file",
|
||||
description:
|
||||
"Move or rename files and directories. Can move files between directories " +
|
||||
"and rename them in a single operation. If the destination exists, the " +
|
||||
"operation will fail. Works across different directories and can be used " +
|
||||
"for simple renaming within the same directory. Both source and destination must be within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(MoveFileArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "search_files",
|
||||
description:
|
||||
"Recursively search for files and directories matching a pattern. " +
|
||||
"Searches through all subdirectories from the starting path. The search " +
|
||||
"is case-insensitive and matches partial names. Returns full paths to all " +
|
||||
"matching items. Great for finding files when you don't know their exact location. " +
|
||||
"Only searches within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(SearchFilesArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "get_file_info",
|
||||
description:
|
||||
"Retrieve detailed metadata about a file or directory. Returns comprehensive " +
|
||||
"information including size, creation time, last modified time, permissions, " +
|
||||
"and type. This tool is perfect for understanding file characteristics " +
|
||||
"without reading the actual content. Only works within allowed directories.",
|
||||
inputSchema: zodToJsonSchema(GetFileInfoArgsSchema) as ToolInput,
|
||||
},
|
||||
{
|
||||
name: "list_allowed_directories",
|
||||
description:
|
||||
"Returns the list of directories that this server is allowed to access. " +
|
||||
"Use this to understand which directories are available before trying to access files.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
protected async handleCallTool(request: z.infer<typeof CallToolRequestSchema>) {
|
||||
try {
|
||||
const { name, arguments: args } = request.params;
|
||||
|
||||
switch (name) {
|
||||
case "read_file": {
|
||||
const parsed = ReadFileArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for read_file: ${parsed.error}`);
|
||||
}
|
||||
const validPath = await this.pathUtils.validatePath(parsed.data.path, this.allowedDirectories);
|
||||
const content = await this.deps.fs.readFile(validPath, "utf-8");
|
||||
return {
|
||||
content: [{ type: "text", text: content }],
|
||||
};
|
||||
}
|
||||
|
||||
case "read_multiple_files": {
|
||||
const parsed = ReadMultipleFilesArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for read_multiple_files: ${parsed.error}`);
|
||||
}
|
||||
const results = await Promise.all(
|
||||
parsed.data.paths.map(async (filePath: string) => {
|
||||
try {
|
||||
const validPath = await this.pathUtils.validatePath(filePath, this.allowedDirectories);
|
||||
const content = await this.deps.fs.readFile(validPath, "utf-8");
|
||||
return `${filePath}:\n${content}\n`;
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
return `${filePath}: Error - ${errorMessage}`;
|
||||
}
|
||||
}),
|
||||
);
|
||||
return {
|
||||
content: [{ type: "text", text: results.join("\n---\n") }],
|
||||
};
|
||||
}
|
||||
|
||||
case "write_file": {
|
||||
const parsed = WriteFileArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for write_file: ${parsed.error}`);
|
||||
}
|
||||
const validPath = await this.pathUtils.validatePath(parsed.data.path, this.allowedDirectories);
|
||||
await this.deps.fs.writeFile(validPath, parsed.data.content, "utf-8");
|
||||
return {
|
||||
content: [{ type: "text", text: `Successfully wrote to ${parsed.data.path}` }],
|
||||
};
|
||||
}
|
||||
|
||||
case "edit_file": {
|
||||
const parsed = EditFileArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for edit_file: ${parsed.error}`);
|
||||
}
|
||||
const result = await this.fileEditor.applyFileEdits(parsed.data.path, parsed.data.edits, parsed.data.dryRun);
|
||||
return {
|
||||
content: [{ type: "text", text: result }],
|
||||
};
|
||||
}
|
||||
|
||||
case "create_directory": {
|
||||
const parsed = CreateDirectoryArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for create_directory: ${parsed.error}`);
|
||||
}
|
||||
const validPath = await this.pathUtils.validatePath(parsed.data.path, this.allowedDirectories);
|
||||
await this.deps.fs.mkdir(validPath, { recursive: true });
|
||||
return {
|
||||
content: [{ type: "text", text: `Successfully created directory ${parsed.data.path}` }],
|
||||
};
|
||||
}
|
||||
|
||||
case "list_directory": {
|
||||
const parsed = ListDirectoryArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for list_directory: ${parsed.error}`);
|
||||
}
|
||||
const validPath = await this.pathUtils.validatePath(parsed.data.path, this.allowedDirectories);
|
||||
const entries = await this.deps.fs.readdir(validPath, { withFileTypes: true });
|
||||
const formatted = entries
|
||||
.map((entry) => `${entry.isDirectory() ? "[DIR]" : "[FILE]"} ${entry.name}`)
|
||||
.join("\n");
|
||||
return {
|
||||
content: [{ type: "text", text: formatted }],
|
||||
};
|
||||
}
|
||||
|
||||
case "directory_tree": {
|
||||
const parsed = DirectoryTreeArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for directory_tree: ${parsed.error}`);
|
||||
}
|
||||
|
||||
interface TreeEntry {
|
||||
name: string;
|
||||
type: 'file' | 'directory';
|
||||
children?: TreeEntry[];
|
||||
}
|
||||
|
||||
const buildTree = async (currentPath: string): Promise<TreeEntry[]> => {
|
||||
const validPath = await this.pathUtils.validatePath(currentPath, this.allowedDirectories);
|
||||
const entries = await this.deps.fs.readdir(validPath, { withFileTypes: true });
|
||||
const result: TreeEntry[] = [];
|
||||
|
||||
for (const entry of entries) {
|
||||
const entryData: TreeEntry = {
|
||||
name: entry.name,
|
||||
type: entry.isDirectory() ? 'directory' : 'file'
|
||||
};
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
const subPath = this.deps.path.join(currentPath, entry.name);
|
||||
entryData.children = await buildTree(subPath);
|
||||
}
|
||||
|
||||
result.push(entryData);
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
const treeData = await buildTree(parsed.data.path);
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify(treeData, null, 2)
|
||||
}],
|
||||
};
|
||||
}
|
||||
|
||||
case "move_file": {
|
||||
const parsed = MoveFileArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for move_file: ${parsed.error}`);
|
||||
}
|
||||
const validSourcePath = await this.pathUtils.validatePath(parsed.data.source, this.allowedDirectories);
|
||||
const validDestPath = await this.pathUtils.validatePath(parsed.data.destination, this.allowedDirectories);
|
||||
await this.deps.fs.rename(validSourcePath, validDestPath);
|
||||
return {
|
||||
content: [{ type: "text", text: `Successfully moved ${parsed.data.source} to ${parsed.data.destination}` }],
|
||||
};
|
||||
}
|
||||
|
||||
case "search_files": {
|
||||
const parsed = SearchFilesArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for search_files: ${parsed.error}`);
|
||||
}
|
||||
const results = await this.fileOps.searchFiles(parsed.data.path, parsed.data.pattern, parsed.data.excludePatterns);
|
||||
return {
|
||||
content: [{ type: "text", text: results.length > 0 ? results.join("\n") : "No matches found" }],
|
||||
};
|
||||
}
|
||||
|
||||
case "get_file_info": {
|
||||
const parsed = GetFileInfoArgsSchema.safeParse(args);
|
||||
if (!parsed.success) {
|
||||
throw new Error(`Invalid arguments for get_file_info: ${parsed.error}`);
|
||||
}
|
||||
const info = await this.fileOps.getFileStats(parsed.data.path);
|
||||
return {
|
||||
content: [{ type: "text", text: Object.entries(info)
|
||||
.map(([key, value]) => `${key}: ${value}`)
|
||||
.join("\n") }],
|
||||
};
|
||||
}
|
||||
|
||||
case "list_allowed_directories": {
|
||||
return {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Allowed directories:\n${this.allowedDirectories.join('\n')}`
|
||||
}],
|
||||
};
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown tool: ${name}`);
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
return {
|
||||
content: [{ type: "text", text: `Error: ${errorMessage}` }],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async start(): Promise<void> {
|
||||
await this.validateDirectories();
|
||||
this.setupHandlers();
|
||||
const transport = new StdioServerTransport();
|
||||
await this.server.connect(transport);
|
||||
console.error("Secure MCP Filesystem Server running on stdio");
|
||||
console.error("Allowed directories:", this.allowedDirectories);
|
||||
}
|
||||
}
|
||||
69
src/filesystem/types.ts
Normal file
69
src/filesystem/types.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { z } from "zod";

// Metadata for a single file or directory as reported by get_file_info.
export interface FileInfo {
  size: number;          // size in bytes
  created: Date;         // birth time
  modified: Date;        // last modification time
  accessed: Date;        // last access time
  isDirectory: boolean;
  isFile: boolean;
  permissions: string;   // presumably an octal mode string — confirm against FileOperations.getFileStats
}

// Node built-in modules used by the server, bundled so tests can inject fakes.
export interface FileSystemDependencies {
  fs: typeof import('fs/promises');
  path: typeof import('path');
  os: typeof import('os');
}

// Schema definitions
// Zod schemas validating the arguments of each MCP tool exposed by the server.

export const ReadFileArgsSchema = z.object({
  path: z.string(),
});

export const ReadMultipleFilesArgsSchema = z.object({
  paths: z.array(z.string()),
});

export const WriteFileArgsSchema = z.object({
  path: z.string(),
  content: z.string(),
});

// A single search-and-replace step within an edit_file request.
export const EditOperation = z.object({
  oldText: z.string().describe('Text to search for - must match exactly'),
  newText: z.string().describe('Text to replace with')
});

export const EditFileArgsSchema = z.object({
  path: z.string(),
  edits: z.array(EditOperation),
  dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format')
});

export const CreateDirectoryArgsSchema = z.object({
  path: z.string(),
});

export const ListDirectoryArgsSchema = z.object({
  path: z.string(),
});

export const DirectoryTreeArgsSchema = z.object({
  path: z.string(),
});

export const MoveFileArgsSchema = z.object({
  source: z.string(),
  destination: z.string(),
});

export const SearchFilesArgsSchema = z.object({
  path: z.string(),
  pattern: z.string(),
  excludePatterns: z.array(z.string()).optional().default([])  // defaults to no exclusions
});

export const GetFileInfoArgsSchema = z.object({
  path: z.string(),
});
|
||||
@@ -7,7 +7,8 @@
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"resolveJsonModule": true
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules"]
|
||||
|
||||
Reference in New Issue
Block a user