✨ Features:
- 🧠 Shadow learner that builds intelligence from command patterns
- 🛡️ Smart command validation with safety checks
- 💾 Automatic context monitoring and backup system
- 🔄 Session continuity across Claude restarts

📚 Documentation:
- Complete Diátaxis-organized documentation
- Learning-oriented tutorial for getting started
- Task-oriented how-to guides for specific problems
- Information-oriented reference for quick lookup
- Understanding-oriented explanations of architecture

🚀 Installation:
- One-command installation script
- Bootstrap prompt for installation via Claude
- Cross-platform compatibility
- Comprehensive testing suite

🎯 Ready for real-world use and community feedback!

🤖 Generated with Claude Code

Co-Authored-By: Claude <noreply@anthropic.com>
#!/usr/bin/env python3
"""Backup Manager - Resilient backup execution system"""

import json
import shutil
import subprocess
from datetime import datetime
from pathlib import Path
from typing import Dict, Any, List

try:
    from .models import BackupResult, GitBackupResult, BackupDecision
except ImportError:
    from models import BackupResult, GitBackupResult, BackupDecision
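
# models.py is not shown here; judging from the call sites below, the three
# imported types are assumed to be simple dataclass-style containers with
# roughly these fields:
#     BackupDecision:  reason, urgency
#     BackupResult:    success, backup_id, backup_path, git_success,
#                      components, message, metadata, error, fallback_performed
#     GitBackupResult: success, commit_id, message, error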


class BackupManager:
    """Handles backup execution with comprehensive error handling"""

    def __init__(self, project_root: str = "."):
        self.project_root = Path(project_root).resolve()
        self.backup_dir = self.project_root / ".claude_hooks" / "backups"
        self.backup_dir.mkdir(parents=True, exist_ok=True)

        # Backup settings
        self.max_backups = 10
        self.log_file = self.backup_dir / "backup.log"
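
        # Resulting on-disk layout (derived from the paths used below):
        #   <project_root>/.claude_hooks/backups/
        #       backup.log                   # JSON-lines operation log
        #       backup_YYYYMMDD_HHMMSS/      # one directory per backup
        #           metadata.json
        #           files/  logs/  state/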

    def execute_backup(self, decision: BackupDecision,
                       session_state: Dict[str, Any]) -> BackupResult:
        """Execute backup with comprehensive error handling"""

        backup_id = self._generate_backup_id()
        backup_path = self.backup_dir / backup_id

        try:
            # Create backup structure
            self._create_backup_structure(backup_path, session_state)

            # Git backup (if possible)
            git_result = self._attempt_git_backup(backup_id, decision.reason)

            # File system backup
            fs_result = self._create_filesystem_backup(backup_path, session_state)

            # Session state backup
            state_result = self._backup_session_state(backup_path, session_state)

            # Clean up old backups
            self._cleanup_old_backups()

            # Log successful backup
            self._log_backup(backup_id, decision, success=True)

            return BackupResult(
                success=True,
                backup_id=backup_id,
                backup_path=str(backup_path),
                git_success=git_result.success,
                components={
                    "git": git_result,
                    "filesystem": fs_result,
                    "session_state": state_result
                }
            )

        except Exception as e:
            # Backup failures should never break the session
            fallback_result = self._create_minimal_backup(session_state)
            self._log_backup(backup_id, decision, success=False, error=str(e))

            return BackupResult(
                success=False,
                backup_id=backup_id,
                error=str(e),
                fallback_performed=fallback_result
            )

    def _generate_backup_id(self) -> str:
        """Generate unique backup identifier"""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        return f"backup_{timestamp}"

    def _create_backup_structure(self, backup_path: Path, session_state: Dict[str, Any]) -> Dict[str, Any]:
        """Create basic backup directory structure"""
        backup_path.mkdir(parents=True, exist_ok=True)

        # Create subdirectories
        (backup_path / "files").mkdir(exist_ok=True)
        (backup_path / "logs").mkdir(exist_ok=True)
        (backup_path / "state").mkdir(exist_ok=True)

        # Create backup metadata
        metadata = {
            "backup_id": backup_path.name,
            "timestamp": datetime.now().isoformat(),
            "session_state": session_state,
            "project_root": str(self.project_root)
        }

        with open(backup_path / "metadata.json", 'w') as f:
            json.dump(metadata, f, indent=2)

        return metadata

    def _attempt_git_backup(self, backup_id: str, reason: str) -> GitBackupResult:
        """Attempt git backup with proper error handling"""
        try:
            # Check if git repo exists; initialize one if not
            if not (self.project_root / ".git").exists():
                result = subprocess.run(
                    ["git", "init"],
                    cwd=self.project_root,
                    capture_output=True,
                    text=True,
                    timeout=30
                )
                if result.returncode != 0:
                    return GitBackupResult(
                        success=False,
                        error=f"Git init failed: {result.stderr}"
                    )

            # Add all changes
            result = subprocess.run(
                ["git", "add", "-A"],
                cwd=self.project_root,
                capture_output=True,
                text=True,
                timeout=60
            )
            if result.returncode != 0:
                return GitBackupResult(
                    success=False,
                    error=f"Git add failed: {result.stderr}"
                )

            # Check if there are changes to commit
            result = subprocess.run(
                ["git", "status", "--porcelain"],
                cwd=self.project_root,
                capture_output=True,
                text=True,
                timeout=30
            )
            if not result.stdout.strip():
                return GitBackupResult(
                    success=True,
                    message="No changes to commit"
                )

            # Create commit
            commit_msg = f"Claude hooks auto-backup: {reason} ({backup_id})"
            result = subprocess.run(
                ["git", "commit", "-m", commit_msg],
                cwd=self.project_root,
                capture_output=True,
                text=True,
                timeout=60
            )
            if result.returncode != 0:
                return GitBackupResult(
                    success=False,
                    error=f"Git commit failed: {result.stderr}"
                )

            # Get commit ID
            commit_id = self._get_latest_commit()
            return GitBackupResult(
                success=True,
                commit_id=commit_id,
                message=f"Committed as {commit_id[:8]}"
            )

        except subprocess.TimeoutExpired:
            return GitBackupResult(success=False, error="Git operation timed out")
        except subprocess.CalledProcessError as e:
            return GitBackupResult(success=False, error=f"Git error: {e}")
        except Exception as e:
            return GitBackupResult(success=False, error=f"Unexpected git error: {e}")
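
    # Caveat: `git commit` also fails when user.name / user.email are not
    # configured; that case is not special-cased above and surfaces as a
    # "Git commit failed" result.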

    def _get_latest_commit(self) -> str:
        """Get the latest commit ID"""
        try:
            result = subprocess.run(
                ["git", "rev-parse", "HEAD"],
                cwd=self.project_root,
                capture_output=True,
                text=True,
                timeout=10
            )
            if result.returncode == 0:
                return result.stdout.strip()
        except Exception:
            pass
        return "unknown"

    def _create_filesystem_backup(self, backup_path: Path,
                                  session_state: Dict[str, Any]) -> BackupResult:
        """Create filesystem backup of important files"""
        try:
            files_dir = backup_path / "files"
            files_dir.mkdir(exist_ok=True)

            # Backup modified files mentioned in session
            modified_files = session_state.get("modified_files", [])
            files_backed_up = []

            for file_path in modified_files:
                try:
                    src = Path(file_path)
                    if src.exists() and src.is_file():
                        # Create relative path structure
                        rel_path = (src.relative_to(self.project_root)
                                    if src.is_relative_to(self.project_root)
                                    else src.name)
                        dst = files_dir / rel_path
                        dst.parent.mkdir(parents=True, exist_ok=True)

                        shutil.copy2(src, dst)
                        files_backed_up.append(str(src))
                except Exception as e:
                    # Log error but continue with other files
                    self._log_file_backup_error(file_path, e)

            # Backup important project files
            important_files = [
                "package.json", "requirements.txt", "Cargo.toml",
                "pyproject.toml", "setup.py", ".gitignore",
                "README.md", "CLAUDE.md"
            ]

            for file_name in important_files:
                file_path = self.project_root / file_name
                if file_path.exists():
                    try:
                        shutil.copy2(file_path, files_dir / file_name)
                        files_backed_up.append(str(file_path))
                    except Exception:
                        pass  # Not critical

            return BackupResult(
                success=True,
                message=f"Backed up {len(files_backed_up)} files",
                metadata={"files": files_backed_up}
            )

        except Exception as e:
            return BackupResult(success=False, error=str(e))
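
    # The only session_state key read here is "modified_files", assumed to
    # be a list of path strings, e.g.:
    #   {"modified_files": ["src/app.py", "/tmp/scratch.txt"]}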

    def _backup_session_state(self, backup_path: Path,
                              session_state: Dict[str, Any]) -> BackupResult:
        """Backup session state and context"""
        try:
            state_dir = backup_path / "state"

            # Save session state
            with open(state_dir / "session.json", 'w') as f:
                json.dump(session_state, f, indent=2)

            # Copy hook logs if they exist
            logs_source = self.project_root / ".claude_hooks" / "logs"
            if logs_source.exists():
                logs_dest = backup_path / "logs"
                shutil.copytree(logs_source, logs_dest, dirs_exist_ok=True)

            # Copy patterns database
            patterns_source = self.project_root / ".claude_hooks" / "patterns"
            if patterns_source.exists():
                patterns_dest = state_dir / "patterns"
                shutil.copytree(patterns_source, patterns_dest, dirs_exist_ok=True)

            return BackupResult(
                success=True,
                message="Session state backed up"
            )

        except Exception as e:
            return BackupResult(success=False, error=str(e))

    def _create_minimal_backup(self, session_state: Dict[str, Any]) -> bool:
        """Create minimal backup when full backup fails"""
        try:
            # At minimum, save session state to a simple file
            emergency_file = self.backup_dir / "emergency_backup.json"

            emergency_data = {
                "timestamp": datetime.now().isoformat(),
                "session_state": session_state,
                "type": "emergency_backup"
            }

            with open(emergency_file, 'w') as f:
                json.dump(emergency_data, f, indent=2)

            return True

        except Exception:
            return False

    def _cleanup_old_backups(self):
        """Remove old backups to save space"""
        try:
            # Get all backup directories
            backup_dirs = [d for d in self.backup_dir.iterdir()
                           if d.is_dir() and d.name.startswith("backup_")]

            # Sort by modification time (newest first)
            backup_dirs.sort(key=lambda d: d.stat().st_mtime, reverse=True)

            # Remove old backups beyond max_backups
            for old_backup in backup_dirs[self.max_backups:]:
                shutil.rmtree(old_backup)

        except Exception:
            pass  # Cleanup failures shouldn't break backup

    def _log_backup(self, backup_id: str, decision: BackupDecision,
                    success: bool, error: str = ""):
        """Log backup operation"""
        try:
            log_entry = {
                "timestamp": datetime.now().isoformat(),
                "backup_id": backup_id,
                "reason": decision.reason,
                "urgency": decision.urgency,
                "success": success,
                "error": error
            }

            # Append to log file
            with open(self.log_file, 'a') as f:
                f.write(json.dumps(log_entry) + "\n")

        except Exception:
            pass  # Logging failures shouldn't break backup

    def _log_file_backup_error(self, file_path: str, error: Exception):
        """Log file backup errors"""
        try:
            error_entry = {
                "timestamp": datetime.now().isoformat(),
                "type": "file_backup_error",
                "file_path": file_path,
                "error": str(error)
            }

            with open(self.log_file, 'a') as f:
                f.write(json.dumps(error_entry) + "\n")

        except Exception:
            pass

    def list_backups(self) -> List[Dict[str, Any]]:
        """List available backups"""
        backups = []

        try:
            backup_dirs = [d for d in self.backup_dir.iterdir()
                           if d.is_dir() and d.name.startswith("backup_")]

            for backup_dir in backup_dirs:
                metadata_file = backup_dir / "metadata.json"
                if metadata_file.exists():
                    try:
                        with open(metadata_file, 'r') as f:
                            metadata = json.load(f)
                        backups.append(metadata)
                    except Exception:
                        pass

        except Exception:
            pass

        return sorted(backups, key=lambda b: b.get("timestamp", ""), reverse=True)
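

# Minimal usage sketch (illustrative; BackupDecision's constructor arguments
# are assumed from the attributes read above):
if __name__ == "__main__":
    manager = BackupManager(project_root=".")
    decision = BackupDecision(reason="manual test run", urgency="normal")

    result = manager.execute_backup(decision, {"modified_files": ["README.md"]})
    print(f"Backup succeeded: {result.success} (id: {result.backup_id})")

    for meta in manager.list_backups():
        print(meta["backup_id"], meta["timestamp"])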