✨ Features:
- 50+ development tools across 13 specialized categories
- ⚡ Sneller Analytics: High-performance vectorized SQL (TB/s throughput)
- 🎬 Asciinema Integration: Terminal recording and sharing
- 🧠 AI-Powered Recommendations: Intelligent tool suggestions
- 🔀 Advanced Git Integration: Smart operations with AI suggestions
- 📁 Enhanced File Operations: Monitoring, bulk ops, backups
- 🔍 Semantic Code Search: AST-based intelligent analysis
- 🏗️ Development Workflow: Testing, linting, formatting
- 🌐 Network & API Tools: HTTP client, mock servers
- 📦 Archive & Compression: Multi-format operations
- 🔬 Process Tracing: System call monitoring
- 🌍 Environment Management: Virtual envs, dependencies

🎯 Ready for production with comprehensive documentation and MCP Inspector support!
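Before the full demo script, here is a minimal quick-start sketch of the directory-tree call the script is built around. The class, method, and parameter names mirror the calls made later in this file; treat it as an illustrative sketch rather than documented API, and note that the `summary` / `total_items` result fields are assumed from how the demo itself reads its results.

import asyncio

from enhanced_mcp.file_operations import EnhancedFileOperations


async def quick_scan(path: str) -> dict:
    # Single call that returns the directory tree as a JSON-serializable dict.
    # Parameter names mirror those used throughout the demo below.
    file_ops = EnhancedFileOperations()
    return await file_ops.list_directory_tree(
        root_path=path,
        include_hidden=False,
        include_metadata=True,
    )


if __name__ == "__main__":
    result = asyncio.run(quick_scan("."))
    # "summary" / "total_items" are assumed from the demo's own usage of the result
    print(f"Found {result['summary']['total_items']} items")

The full demo script follows.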
#!/usr/bin/env python3
"""
Demo script showing the comprehensive directory tree JSON output capabilities
"""

import asyncio
import json
import tempfile
from pathlib import Path

from enhanced_mcp.file_operations import EnhancedFileOperations


async def demo_directory_tree_json():
    """Demonstrate the comprehensive JSON directory tree functionality"""

    print("🌳 Enhanced MCP Tools - Directory Tree JSON Demo")
    print("=" * 60)

    # Initialize the file operations class
    file_ops = EnhancedFileOperations()

    # Create a sample directory structure for demo
    with tempfile.TemporaryDirectory() as temp_dir:
        temp_path = Path(temp_dir)
        print(f"📁 Demo directory: {temp_path}")

        # Create sample structure
        (temp_path / "src").mkdir()
        (temp_path / "src" / "main.py").write_text(
            """#!/usr/bin/env python3
def main():
    print("Hello, World!")

if __name__ == "__main__":
    main()
"""
        )

        (temp_path / "src" / "utils.py").write_text(
            """def helper_function():
    return "This is a helper"
"""
        )

        (temp_path / "docs").mkdir()
        (temp_path / "docs" / "README.md").write_text(
            "# Project Documentation\n\nThis is the main documentation."
        )
        (temp_path / "docs" / "api.md").write_text("# API Reference\n\nAPI documentation here.")

        (temp_path / "config").mkdir()
        (temp_path / "config" / "settings.json").write_text('{"debug": true, "version": "1.0.0"}')

        # Hidden file
        (temp_path / ".gitignore").write_text("*.pyc\n__pycache__/\n.env\n")

        # Large file
        (temp_path / "large_file.txt").write_text("X" * 10000)  # 10KB file

        print("📋 Created sample project structure")

        # Demonstrate different scanning modes
        demos = [
            {
                "name": "Complete Metadata Scan",
                "params": {
                    "root_path": str(temp_path),
                    "include_hidden": True,
                    "include_metadata": True,
                    "exclude_patterns": None,
                },
            },
            {
                "name": "Production-Ready Scan",
                "params": {
                    "root_path": str(temp_path),
                    "include_hidden": False,
                    "include_metadata": True,
                    "exclude_patterns": ["*.pyc", "__pycache__", ".env"],
                    "max_depth": 3,
                },
            },
            {
                "name": "Large Files Only",
                "params": {
                    "root_path": str(temp_path),
                    "include_hidden": False,
                    "include_metadata": True,
                    "size_threshold_mb": 0.005,  # 5KB threshold
                },
            },
            {
                "name": "Minimal Structure",
                "params": {
                    "root_path": str(temp_path),
                    "include_hidden": False,
                    "include_metadata": False,
                    "max_depth": 2,
                },
            },
        ]

        for demo in demos:
            print(f"\n=== {demo['name']} ===")

            result = await file_ops.list_directory_tree(**demo["params"])

            if "error" in result:
                print(f"❌ Error: {result['error']}")
                continue

            print("✅ Scan completed successfully")
            print(f"📊 Summary: {result['summary']['total_items']} items found")

            # Show JSON structure sample
            print("📄 JSON Output Structure:")

            # Pretty print the result with limited depth to avoid overwhelming output
            def limit_json_depth(obj, max_depth=2, current_depth=0):
                """Limit JSON depth for display purposes"""
                if current_depth >= max_depth:
                    if isinstance(obj, dict):
                        return {"...": "truncated"}
                    elif isinstance(obj, list):
                        return ["...truncated..."]
                    else:
                        return obj

                if isinstance(obj, dict):
                    return {
                        k: limit_json_depth(v, max_depth, current_depth + 1) for k, v in obj.items()
                    }
                elif isinstance(obj, list):
                    return [
                        limit_json_depth(item, max_depth, current_depth + 1) for item in obj[:3]
                    ] + (["...more..."] if len(obj) > 3 else [])
                else:
                    return obj

            limited_result = limit_json_depth(result, max_depth=3)
            print(json.dumps(limited_result, indent=2))

        # Demonstrate real-world usage examples
        print("\n=== Real-World Usage Examples ===")

        # Example 1: Find all Python files
        print("\n🐍 Finding all Python files:")
        python_scan = await file_ops.list_directory_tree(
            root_path=str(temp_path),
            include_hidden=False,
            include_metadata=True,
            exclude_patterns=["*.pyc", "__pycache__"],
        )

        def find_files_by_extension(node, extension, files=None):
            if files is None:
                files = []

            if node["type"] == "file" and node["name"].endswith(extension):
                files.append(
                    {
                        "path": node["path"],
                        "size": node.get("size_human", "unknown"),
                        "modified": node.get("modified_iso", "unknown"),
                    }
                )

            if "children" in node:
                for child in node["children"]:
                    find_files_by_extension(child, extension, files)

            return files

        python_files = find_files_by_extension(python_scan["tree"], ".py")
        for py_file in python_files:
            print(
                f" 📄 {py_file['path']} ({py_file['size']}) - Modified: {py_file['modified'][:10]}"
            )

        # Example 2: Calculate directory sizes
        print("\n📊 Directory sizes:")
        size_scan = await file_ops.list_directory_tree(
            root_path=str(temp_path), include_metadata=True
        )

        def get_directory_sizes(node, sizes=None):
            if sizes is None:
                sizes = {}

            if node["type"] == "directory":
                total_size = node.get("total_size_bytes", 0)
                sizes[node["name"]] = {
                    "size_bytes": total_size,
                    "size_human": node.get("total_size_human", "0 B"),
                    "child_count": node.get("child_count", 0),
                }

            if "children" in node:
                for child in node["children"]:
                    get_directory_sizes(child, sizes)

            return sizes

        dir_sizes = get_directory_sizes(size_scan["tree"])
        for dir_name, info in dir_sizes.items():
            print(f" 📁 {dir_name}: {info['size_human']} ({info['child_count']} items)")

        # Example 3: Export to JSON file
        print("\n💾 Exporting complete structure to JSON file:")
        output_file = temp_path / "directory_structure.json"

        complete_scan = await file_ops.list_directory_tree(
            root_path=str(temp_path), include_hidden=True, include_metadata=True
        )

        with open(output_file, "w") as f:
            json.dump(complete_scan, f, indent=2)

        print(f" ✅ Exported to: {output_file}")
        print(f" 📊 File size: {output_file.stat().st_size} bytes")

        # Verify the exported file
        with open(output_file) as f:
            imported_data = json.load(f)
            print(
                f" ✅ Verification: {imported_data['summary']['total_items']} items in exported JSON"
            )

        print("\n🎯 Use Cases Demonstrated:")
        print(" • 📁 Complete directory metadata collection")
        print(" • 🔍 File filtering and search capabilities")
        print(" • 📊 Directory size analysis")
        print(" • 💾 JSON export for external tools")
        print(" • 🚀 Integration with build/CI systems")
        print(" • 📈 Project analysis and reporting")

        print("\n🎉 Directory Tree JSON Demo completed!")
        print("✅ Ready for production use with comprehensive metadata!")


if __name__ == "__main__":
    asyncio.run(demo_directory_tree_json())