#!/usr/bin/env python3
"""
Demo script showing the comprehensive directory tree JSON output capabilities
"""

import asyncio
import json
import tempfile
from pathlib import Path

from enhanced_mcp.file_operations import EnhancedFileOperations

# NOTE(review): several emoji string literals in this script appear to be
# mojibake-garbled in the original source (e.g. "šŸ“"); they are preserved
# byte-for-byte so the runtime output is unchanged. Confirm intended glyphs
# against the original author before "fixing" them.


def _limit_json_depth(obj, max_depth=2, current_depth=0):
    """Return a display-friendly copy of *obj* truncated past *max_depth* levels.

    Dicts beyond the depth limit collapse to {"...": "truncated"}, lists to
    ["...truncated..."]; lists within the limit are capped at their first
    three elements (with a "...more..." marker). Scalars pass through.
    Used only to keep demo output readable — never for real data.
    """
    if current_depth >= max_depth:
        if isinstance(obj, dict):
            return {"...": "truncated"}
        if isinstance(obj, list):
            return ["...truncated..."]
        return obj

    if isinstance(obj, dict):
        return {
            k: _limit_json_depth(v, max_depth, current_depth + 1) for k, v in obj.items()
        }
    if isinstance(obj, list):
        limited = [_limit_json_depth(item, max_depth, current_depth + 1) for item in obj[:3]]
        return limited + (["...more..."] if len(obj) > 3 else [])
    return obj


def _find_files_by_extension(node, extension, files=None):
    """Recursively collect file entries under tree *node* ending in *extension*.

    Returns a list of dicts with "path", "size" (human-readable, falls back to
    "unknown") and "modified" (ISO timestamp, falls back to "unknown").
    """
    if files is None:
        files = []

    if node["type"] == "file" and node["name"].endswith(extension):
        files.append(
            {
                "path": node["path"],
                "size": node.get("size_human", "unknown"),
                "modified": node.get("modified_iso", "unknown"),
            }
        )

    if "children" in node:
        for child in node["children"]:
            _find_files_by_extension(child, extension, files)

    return files


def _get_directory_sizes(node, sizes=None):
    """Recursively collect per-directory size info from tree *node*.

    Returns a dict keyed by directory name with "size_bytes", "size_human"
    and "child_count" (all defaulting sensibly when metadata is absent).
    """
    if sizes is None:
        sizes = {}

    if node["type"] == "directory":
        total_size = node.get("total_size_bytes", 0)
        sizes[node["name"]] = {
            "size_bytes": total_size,
            "size_human": node.get("total_size_human", "0 B"),
            "child_count": node.get("child_count", 0),
        }

    if "children" in node:
        for child in node["children"]:
            _get_directory_sizes(child, sizes)

    return sizes


def _create_sample_project(temp_path: Path) -> None:
    """Create a small sample project tree under *temp_path* for the demo."""
    (temp_path / "src").mkdir()
    (temp_path / "src" / "main.py").write_text(
        """#!/usr/bin/env python3

def main():
    print("Hello, World!")

if __name__ == "__main__":
    main()
"""
    )
    (temp_path / "src" / "utils.py").write_text(
        """def helper_function():
    return "This is a helper"
"""
    )
    (temp_path / "docs").mkdir()
    (temp_path / "docs" / "README.md").write_text(
        "# Project Documentation\n\nThis is the main documentation."
    )
    (temp_path / "docs" / "api.md").write_text("# API Reference\n\nAPI documentation here.")
    (temp_path / "config").mkdir()
    (temp_path / "config" / "settings.json").write_text('{"debug": true, "version": "1.0.0"}')

    # Hidden file — exercises the include_hidden flag
    (temp_path / ".gitignore").write_text("*.pyc\n__pycache__/\n.env\n")

    # Large file (10KB) — exercises the size_threshold_mb filter
    (temp_path / "large_file.txt").write_text("X" * 10000)


async def demo_directory_tree_json():
    """Demonstrate the comprehensive JSON directory tree functionality"""
    print("🌳 Enhanced MCP Tools - Directory Tree JSON Demo")
    print("=" * 60)

    # Initialize the file operations class
    file_ops = EnhancedFileOperations()

    # Create a sample directory structure for demo
    with tempfile.TemporaryDirectory() as temp_dir:
        temp_path = Path(temp_dir)
        print(f"šŸ“ Demo directory: {temp_path}")

        _create_sample_project(temp_path)
        print("šŸ“‹ Created sample project structure")

        # Demonstrate different scanning modes
        demos = [
            {
                "name": "Complete Metadata Scan",
                "params": {
                    "root_path": str(temp_path),
                    "include_hidden": True,
                    "include_metadata": True,
                    "exclude_patterns": None,
                },
            },
            {
                "name": "Production-Ready Scan",
                "params": {
                    "root_path": str(temp_path),
                    "include_hidden": False,
                    "include_metadata": True,
                    "exclude_patterns": ["*.pyc", "__pycache__", ".env"],
                    "max_depth": 3,
                },
            },
            {
                "name": "Large Files Only",
                "params": {
                    "root_path": str(temp_path),
                    "include_hidden": False,
                    "include_metadata": True,
                    "size_threshold_mb": 0.005,  # 5KB threshold
                },
            },
            {
                "name": "Minimal Structure",
                "params": {
                    "root_path": str(temp_path),
                    "include_hidden": False,
                    "include_metadata": False,
                    "max_depth": 2,
                },
            },
        ]

        for demo in demos:
            print(f"\n=== {demo['name']} ===")
            result = await file_ops.list_directory_tree(**demo["params"])

            if "error" in result:
                print(f"āŒ Error: {result['error']}")
                continue

            print("āœ… Scan completed successfully")
            print(f"šŸ“Š Summary: {result['summary']['total_items']} items found")

            # Pretty print the result with limited depth to avoid
            # overwhelming output
            print("šŸ“„ JSON Output Structure:")
            limited_result = _limit_json_depth(result, max_depth=3)
            print(json.dumps(limited_result, indent=2))

        # Demonstrate real-world usage examples
        print("\n=== Real-World Usage Examples ===")

        # Example 1: Find all Python files
        print("\nšŸ Finding all Python files:")
        python_scan = await file_ops.list_directory_tree(
            root_path=str(temp_path),
            include_hidden=False,
            include_metadata=True,
            exclude_patterns=["*.pyc", "__pycache__"],
        )

        python_files = _find_files_by_extension(python_scan["tree"], ".py")
        for py_file in python_files:
            print(
                f" šŸ“„ {py_file['path']} ({py_file['size']}) - Modified: {py_file['modified'][:10]}"
            )

        # Example 2: Calculate directory sizes
        print("\nšŸ“Š Directory sizes:")
        size_scan = await file_ops.list_directory_tree(
            root_path=str(temp_path), include_metadata=True
        )

        dir_sizes = _get_directory_sizes(size_scan["tree"])
        for dir_name, info in dir_sizes.items():
            print(f" šŸ“ {dir_name}: {info['size_human']} ({info['child_count']} items)")

        # Example 3: Export to JSON file
        print("\nšŸ’¾ Exporting complete structure to JSON file:")
        output_file = temp_path / "directory_structure.json"
        complete_scan = await file_ops.list_directory_tree(
            root_path=str(temp_path), include_hidden=True, include_metadata=True
        )

        with open(output_file, "w") as f:
            json.dump(complete_scan, f, indent=2)

        print(f" āœ… Exported to: {output_file}")
        print(f" šŸ“Š File size: {output_file.stat().st_size} bytes")

        # Verify the exported file round-trips as valid JSON
        with open(output_file) as f:
            imported_data = json.load(f)
        print(
            f" āœ… Verification: {imported_data['summary']['total_items']} items in exported JSON"
        )

        print("\nšŸŽÆ Use Cases Demonstrated:")
        print(" • šŸ“ Complete directory metadata collection")
        print(" • šŸ” File filtering and search capabilities")
        print(" • šŸ“Š Directory size analysis")
        print(" • šŸ’¾ JSON export for external tools")
        print(" • šŸš€ Integration with build/CI systems")
        print(" • šŸ“ˆ Project analysis and reporting")

        print("\nšŸŽ‰ Directory Tree JSON Demo completed!")
        print("āœ… Ready for production use with comprehensive metadata!")


if __name__ == "__main__":
    asyncio.run(demo_directory_tree_json())