Coverage for me2ai_mcp\tools\filesystem.py: 0%

130 statements  

coverage.py v7.8.0, created at 2025-04-13 11:30 +0200

1""" 

2Filesystem tools for ME2AI MCP servers. 

3 

4This module provides common tools for file and directory operations 

5that can be used across different MCP servers. 

6""" 

7from typing import Dict, List, Any, Optional 

8import os 

9import logging 

10from dataclasses import dataclass 

11from pathlib import Path 

12import glob 

13from ..base import BaseTool 

14 

15# Configure logging 

16logger = logging.getLogger("me2ai-mcp-tools-filesystem") 

17 

18 

@dataclass
class FileReaderTool(BaseTool):
    """Tool for reading file content."""

    name: str = "read_file"
    description: str = "Read content from a file"
    max_file_size: int = 1024 * 1024 * 5  # 5MB

    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """Read a file and return its content.

        Args:
            params: Dictionary containing:
                - file_path: Path to the file to read
                - encoding: File encoding (default: utf-8)
                - binary: Whether to read as binary (default: False)

        Returns:
            Dictionary containing file content and metadata
        """
        file_path = params.get("file_path")
        if not file_path:
            return {
                "success": False,
                "error": "file_path parameter is required"
            }

        encoding = params.get("encoding", "utf-8")
        binary = params.get("binary", False)

        try:
            # Normalize path
            file_path = os.path.abspath(file_path)

            # Check if file exists
            if not os.path.exists(file_path):
                return {
                    "success": False,
                    "error": f"File not found: {file_path}"
                }

            # Check if path is a file
            if not os.path.isfile(file_path):
                return {
                    "success": False,
                    "error": f"Path is not a file: {file_path}"
                }

            # Check file size
            file_size = os.path.getsize(file_path)
            if file_size > self.max_file_size:
                return {
                    "success": False,
                    "error": f"File too large: {file_size} bytes (max {self.max_file_size})"
                }

            # Read file content
            if binary:
                with open(file_path, "rb") as f:
                    content = f.read()
                # Convert binary to base64 for JSON compatibility
                import base64
                content = base64.b64encode(content).decode("utf-8")
            else:
                with open(file_path, "r", encoding=encoding) as f:
                    content = f.read()

            # Get file stats
            stats = os.stat(file_path)

            # Return results
            return {
                "success": True,
                "file_path": file_path,
                "content": content,
                "size": file_size,
                "encoding": encoding if not binary else None,
                "binary": binary,
                "metadata": {
                    "created": stats.st_ctime,
                    "modified": stats.st_mtime,
                    "accessed": stats.st_atime,
                    "extension": os.path.splitext(file_path)[1],
                    "filename": os.path.basename(file_path)
                }
            }

        except Exception as e:
            logger.error(f"Error reading file {file_path}: {str(e)}")
            return {
                "success": False,
                "error": f"Error reading file: {str(e)}",
                "exception_type": type(e).__name__
            }


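# Illustrative usage sketch for FileReaderTool. This assumes the tool can be
# instantiated with no arguments (its own dataclass fields all have defaults;
# whatever BaseTool requires is not visible in this module) and that callers
# simply await ``execute`` with a parameter dictionary. The path below is
# purely hypothetical.
async def _example_read_file() -> None:
    """Hypothetical helper: read a text file and print its content or error."""
    tool = FileReaderTool()
    result = await tool.execute({"file_path": "/tmp/example.txt", "encoding": "utf-8"})
    if result["success"]:
        print(result["content"])
    else:
        print(f"Read failed: {result['error']}")

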

@dataclass
class FileWriterTool(BaseTool):
    """Tool for writing content to files."""

    name: str = "write_file"
    description: str = "Write content to a file"

    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """Write content to a file.

        Args:
            params: Dictionary containing:
                - file_path: Path to the file to write
                - content: Content to write
                - encoding: File encoding (default: utf-8)
                - binary: Whether content is binary (base64-encoded) (default: False)
                - overwrite: Whether to overwrite existing file (default: False)
                - append: Whether to append to existing file (default: False)

        Returns:
            Dictionary containing operation result
        """
        file_path = params.get("file_path")
        content = params.get("content")

        if not file_path:
            return {
                "success": False,
                "error": "file_path parameter is required"
            }

        if content is None:
            return {
                "success": False,
                "error": "content parameter is required"
            }

        encoding = params.get("encoding", "utf-8")
        binary = params.get("binary", False)
        overwrite = params.get("overwrite", False)
        append = params.get("append", False)

        try:
            # Normalize path
            file_path = os.path.abspath(file_path)

            # Check if file exists
            file_exists = os.path.exists(file_path)

            if file_exists and not (overwrite or append):
                return {
                    "success": False,
                    "error": f"File already exists: {file_path} (set overwrite=True to replace or append=True to add content)"
                }

            # Create parent directories if they don't exist
            os.makedirs(os.path.dirname(file_path), exist_ok=True)

            # Write content
            mode = "wb" if binary else "w"
            if append:
                mode = "ab" if binary else "a"

            if binary:
                # Decode base64 content
                import base64
                binary_content = base64.b64decode(content)
                with open(file_path, mode) as f:
                    f.write(binary_content)
            else:
                with open(file_path, mode, encoding=encoding) as f:
                    f.write(content)

            # Get file stats
            stats = os.stat(file_path)

            # Return results
            return {
                "success": True,
                "file_path": file_path,
                "size": os.path.getsize(file_path),
                "operation": "append" if append else ("overwrite" if file_exists else "create"),
                "metadata": {
                    "created": stats.st_ctime,
                    "modified": stats.st_mtime,
                    "extension": os.path.splitext(file_path)[1],
                    "filename": os.path.basename(file_path)
                }
            }

        except Exception as e:
            logger.error(f"Error writing to file {file_path}: {str(e)}")
            return {
                "success": False,
                "error": f"Error writing to file: {str(e)}",
                "exception_type": type(e).__name__
            }


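# Illustrative usage sketch for FileWriterTool: create a file, then append to
# it. As above, this assumes a no-argument constructor (BaseTool's own
# requirements are not shown in this module) and a hypothetical path.
async def _example_write_file() -> None:
    """Hypothetical helper: write a file, then append a second line."""
    tool = FileWriterTool()
    created = await tool.execute({
        "file_path": "/tmp/example_output.txt",
        "content": "first line\n",
        "overwrite": True,
    })
    if created["success"]:
        await tool.execute({
            "file_path": "/tmp/example_output.txt",
            "content": "second line\n",
            "append": True,
        })

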

@dataclass
class DirectoryListerTool(BaseTool):
    """Tool for listing directory contents."""

    name: str = "list_directory"
    description: str = "List contents of a directory"

    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """List the contents of a directory.

        Args:
            params: Dictionary containing:
                - directory_path: Path to the directory to list
                - pattern: Optional glob pattern to filter results
                - recursive: Whether to list subdirectories recursively (default: False)
                - include_hidden: Whether to include hidden files (default: False)
                - max_depth: Maximum recursion depth (default: 1)

        Returns:
            Dictionary containing directory contents
        """
        directory_path = params.get("directory_path")
        if not directory_path:
            return {
                "success": False,
                "error": "directory_path parameter is required"
            }

        pattern = params.get("pattern")
        recursive = params.get("recursive", False)
        include_hidden = params.get("include_hidden", False)
        max_depth = params.get("max_depth", 1)

        try:
            # Normalize path
            directory_path = os.path.abspath(directory_path)

            # Check if directory exists
            if not os.path.exists(directory_path):
                return {
                    "success": False,
                    "error": f"Directory not found: {directory_path}"
                }

            # Check if path is a directory
            if not os.path.isdir(directory_path):
                return {
                    "success": False,
                    "error": f"Path is not a directory: {directory_path}"
                }

            # List directory contents
            items = []

            if recursive:
                # Recursive listing with max_depth control
                for root, dirs, files in os.walk(directory_path):
                    # Calculate current depth
                    depth = root[len(directory_path):].count(os.sep)
                    if depth > max_depth - 1:
                        continue

                    # Skip hidden directories if not included
                    if not include_hidden:
                        dirs[:] = [d for d in dirs if not d.startswith(".")]

                    # Process files
                    for file in files:
                        # Skip hidden files if not included
                        if not include_hidden and file.startswith("."):
                            continue

                        file_path = os.path.join(root, file)

                        # Apply pattern filter if specified
                        if pattern and not fnmatch.fnmatch(file, pattern):
                            continue

                        # Add file info
                        stats = os.stat(file_path)
                        items.append({
                            "name": file,
                            "path": file_path,
                            "type": "file",
                            "size": stats.st_size,
                            "created": stats.st_ctime,
                            "modified": stats.st_mtime,
                            "relative_path": os.path.relpath(file_path, directory_path)
                        })

                    # Add directory info
                    for dir_name in dirs:
                        dir_path = os.path.join(root, dir_name)

                        # Skip hidden directories if not included
                        if not include_hidden and dir_name.startswith("."):
                            continue

                        stats = os.stat(dir_path)
                        items.append({
                            "name": dir_name,
                            "path": dir_path,
                            "type": "directory",
                            "size": None,
                            "created": stats.st_ctime,
                            "modified": stats.st_mtime,
                            "relative_path": os.path.relpath(dir_path, directory_path)
                        })
            else:
                # Non-recursive listing
                for item in os.listdir(directory_path):
                    # Skip hidden items if not included
                    if not include_hidden and item.startswith("."):
                        continue

                    item_path = os.path.join(directory_path, item)

                    # Apply pattern filter for files
                    if pattern and os.path.isfile(item_path) and not fnmatch.fnmatch(item, pattern):
                        continue

                    # Add item info
                    stats = os.stat(item_path)
                    is_dir = os.path.isdir(item_path)

                    items.append({
                        "name": item,
                        "path": item_path,
                        "type": "directory" if is_dir else "file",
                        "size": None if is_dir else stats.st_size,
                        "created": stats.st_ctime,
                        "modified": stats.st_mtime,
                        "relative_path": item
                    })

            # Sort items: directories first, then files, both alphabetically
            items.sort(key=lambda x: (0 if x["type"] == "directory" else 1, x["name"].lower()))

            # Return results
            return {
                "success": True,
                "directory_path": directory_path,
                "pattern": pattern,
                "recursive": recursive,
                "items": items,
                "count": len(items)
            }

        except Exception as e:
            logger.error(f"Error listing directory {directory_path}: {str(e)}")
            return {
                "success": False,
                "error": f"Error listing directory: {str(e)}",
                "exception_type": type(e).__name__
            }
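

# Illustrative usage sketch for DirectoryListerTool: recursive listing limited
# to two levels and filtered to Python files. Same assumptions as the sketches
# above (no-argument constructor, hypothetical path); it could be driven with
# asyncio.run(_example_list_directory()).
async def _example_list_directory() -> None:
    """Hypothetical helper: list *.py files up to two directory levels deep."""
    tool = DirectoryListerTool()
    result = await tool.execute({
        "directory_path": "/tmp/project",
        "pattern": "*.py",
        "recursive": True,
        "max_depth": 2,
    })
    if result["success"]:
        for item in result["items"]:
            print(f"{item['type']:9} {item['relative_path']}")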