/**
 * Backup Manager - Resilient backup execution system
 * Node.js implementation
 */

const fs = require('fs-extra');
const path = require('path');
const { spawn } = require('child_process');

class BackupManager {
  constructor(projectRoot = '.') {
    this.projectRoot = path.resolve(projectRoot);
    this.backupDir = path.join(this.projectRoot, '.claude_hooks', 'backups');
    fs.ensureDirSync(this.backupDir);

    // Backup settings
    this.maxBackups = 10;
    this.logFile = path.join(this.backupDir, 'backup.log');
  }
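
  /**
   * Parameter shapes, inferred from how the fields are read below (documented
   * here as an aid; not a formal schema from the original author):
   *
   * @typedef {Object} BackupDecision
   * @property {string}  reason  - why the backup was triggered (e.g. 'manual')
   * @property {string}  urgency - e.g. 'medium'
   * @property {boolean} [force] - passed through; not consulted by this class
   *
   * @typedef {Object} SessionState
   * @property {string[]} [modifiedFiles] - file paths to copy into the backup
   * @property {Object}   [toolUsage]     - arbitrary tool-usage counters
   * @property {string}   [timestamp]     - ISO-8601 creation time
   */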

  /**
   * Execute backup with comprehensive error handling
   */
  async executeBackup(decision, sessionState) {
    const backupId = this._generateBackupId();
    const backupPath = path.join(this.backupDir, backupId);

    try {
      // Create backup structure
      await this._createBackupStructure(backupPath, sessionState);

      // Git backup (if possible)
      const gitResult = await this._attemptGitBackup(backupId, decision.reason);

      // File system backup
      const fsResult = await this._createFilesystemBackup(backupPath, sessionState);

      // Session state backup
      const stateResult = await this._backupSessionState(backupPath, sessionState);

      // Clean up old backups
      await this._cleanupOldBackups();

      // Log successful backup
      await this._logBackup(backupId, decision, true);

      return {
        success: true,
        backupId,
        backupPath,
        gitSuccess: gitResult.success,
        components: {
          git: gitResult,
          filesystem: fsResult,
          sessionState: stateResult
        }
      };
    } catch (error) {
      // Backup failures should never break the session
      const fallbackPerformed = await this._createMinimalBackup(sessionState);
      await this._logBackup(backupId, decision, false, error.message);

      return {
        success: false,
        backupId,
        error: error.message,
        fallbackPerformed
      };
    }
  }
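
  // Note: executeBackup resolves rather than rejects on failure; errors come
  // back as `success: false` alongside a best-effort emergency backup, so
  // callers do not need their own try/catch around it.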

  /**
   * Create backup from project path (external API)
   */
  async createBackup(projectPath, context = {}, force = false) {
    // Note: projectPath is currently unused; backups always operate on the
    // projectRoot passed to the constructor.
    const decision = {
      reason: context.trigger || 'manual',
      urgency: 'medium',
      force
    };

    const sessionState = {
      modifiedFiles: context.modified_files || [],
      toolUsage: context.tool_usage || {},
      timestamp: new Date().toISOString(),
      ...context
    };

    const result = await this.executeBackup(decision, sessionState);
    return result.success ? result.backupId : null;
  }

  /**
   * Get backup information
   */
  async getBackupInfo(backupId) {
    try {
      const backupPath = path.join(this.backupDir, backupId);
      const metadataFile = path.join(backupPath, 'metadata.json');

      if (await fs.pathExists(metadataFile)) {
        const metadata = await fs.readJson(metadataFile);

        // Add file list if available
        const filesDir = path.join(backupPath, 'files');
        if (await fs.pathExists(filesDir)) {
          metadata.files_backed_up = await this._getBackupFiles(filesDir);
        }

        return metadata;
      }
    } catch (error) {
      console.error('Error reading backup info:', error.message);
    }

    return null;
  }

  /**
   * Generate unique backup identifier
   */
  _generateBackupId() {
    const timestamp = new Date().toISOString()
      .replace(/[:-]/g, '')
      .replace(/\.\d{3}Z$/, '')
      .replace('T', '_');
    return `backup_${timestamp}`;
  }
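
  // Example output: "backup_20240101_120000" (UTC, second resolution; IDs
  // generated within the same second would collide).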

  /**
   * Create basic backup directory structure
   */
  async _createBackupStructure(backupPath, sessionState) {
    await fs.ensureDir(backupPath);

    // Create subdirectories
    await fs.ensureDir(path.join(backupPath, 'files'));
    await fs.ensureDir(path.join(backupPath, 'logs'));
    await fs.ensureDir(path.join(backupPath, 'state'));

    // Create backup metadata
    const metadata = {
      backup_id: path.basename(backupPath),
      timestamp: new Date().toISOString(),
      session_state: sessionState,
      project_root: this.projectRoot
    };

    await fs.writeJson(path.join(backupPath, 'metadata.json'), metadata, { spaces: 2 });

    return metadata;
  }
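
  // Resulting layout under .claude_hooks/backups/<backup_id>/, filled in by
  // the steps that follow:
  //   metadata.json  - backup id, timestamp, session state, project root
  //   files/         - copies of modified and key project files
  //   logs/          - hook logs copied from .claude_hooks/logs
  //   state/         - session.json plus the patterns database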

  /**
   * Attempt git backup with proper error handling
   */
  async _attemptGitBackup(backupId, reason) {
    try {
      // Check if a git repo exists; initialize one if not
      if (!(await fs.pathExists(path.join(this.projectRoot, '.git')))) {
        const initResult = await this._runGitCommand(['init']);
        if (!initResult.success) {
          return { success: false, error: `Git init failed: ${initResult.error}` };
        }
      }

      // Stage all changes
      const addResult = await this._runGitCommand(['add', '-A']);
      if (!addResult.success) {
        return { success: false, error: `Git add failed: ${addResult.error}` };
      }

      // Check if there are changes to commit
      const statusResult = await this._runGitCommand(['status', '--porcelain']);
      if (statusResult.success && !statusResult.stdout.trim()) {
        return { success: true, message: 'No changes to commit' };
      }

      // Create commit (note: this fails on machines where git user.name and
      // user.email are not configured; the caller treats that as non-fatal)
      const commitMsg = `Claude hooks auto-backup: ${reason} (${backupId})`;
      const commitResult = await this._runGitCommand(['commit', '-m', commitMsg]);
      if (!commitResult.success) {
        return { success: false, error: `Git commit failed: ${commitResult.error}` };
      }

      // Get commit ID
      const commitId = await this._getLatestCommit();
      return {
        success: true,
        commitId,
        message: `Committed as ${commitId.substring(0, 8)}`
      };
    } catch (error) {
      return { success: false, error: `Unexpected git error: ${error.message}` };
    }
  }

  /**
   * Run git command with timeout and error handling
   */
  _runGitCommand(args, timeoutMs = 60000) {
    return new Promise((resolve) => {
      const child = spawn('git', args, {
        cwd: this.projectRoot,
        stdio: ['pipe', 'pipe', 'pipe']
      });

      let stdout = '';
      let stderr = '';

      child.stdout.on('data', (data) => {
        stdout += data.toString();
      });

      child.stderr.on('data', (data) => {
        stderr += data.toString();
      });

      // Kill the process if it runs too long; the later resolve() calls from
      // 'close'/'error' are then no-ops, since a promise settles only once.
      const timeout = setTimeout(() => {
        child.kill();
        resolve({ success: false, error: 'Git operation timed out' });
      }, timeoutMs);

      child.on('close', (code) => {
        clearTimeout(timeout);
        resolve({
          success: code === 0,
          stdout: stdout.trim(),
          stderr: stderr.trim(),
          error: code !== 0 ? stderr.trim() : null
        });
      });

      child.on('error', (error) => {
        clearTimeout(timeout);
        resolve({ success: false, error: error.message });
      });
    });
  }
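
  // Contract: _runGitCommand always resolves, never rejects; callers branch
  // on `result.success` instead of wrapping calls in try/catch.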

  /**
   * Get the latest commit ID
   */
  async _getLatestCommit() {
    try {
      const result = await this._runGitCommand(['rev-parse', 'HEAD'], 10000);
      return result.success ? result.stdout : 'unknown';
    } catch (error) {
      return 'unknown';
    }
  }

  /**
   * Create filesystem backup of important files
   */
  async _createFilesystemBackup(backupPath, sessionState) {
    try {
      const filesDir = path.join(backupPath, 'files');
      await fs.ensureDir(filesDir);

      // Backup modified files mentioned in session
      const modifiedFiles = sessionState.modifiedFiles || sessionState.modified_files || [];
      const filesBackedUp = [];

      for (const filePath of modifiedFiles) {
        try {
          // Resolve relative paths against the project root, not process.cwd()
          const src = path.resolve(this.projectRoot, filePath);
          if (await fs.pathExists(src) && (await fs.stat(src)).isFile()) {
            // Recreate the relative path structure; files outside the project
            // root would yield '..' segments and escape filesDir, so fall back
            // to their basename in that case
            const relativePath = path.relative(this.projectRoot, src);
            const dst = relativePath.startsWith('..')
              ? path.join(filesDir, path.basename(src))
              : path.join(filesDir, relativePath);

            await fs.ensureDir(path.dirname(dst));
            await fs.copy(src, dst, { preserveTimestamps: true });
            filesBackedUp.push(src);
          }
        } catch (error) {
          // Log the error but continue with the other files
          await this._logFileBackupError(filePath, error);
        }
      }

      // Backup important project files
      const importantFiles = [
        'package.json', 'requirements.txt', 'Cargo.toml',
        'pyproject.toml', 'setup.py', '.gitignore',
        'README.md', 'CLAUDE.md'
      ];

      for (const fileName of importantFiles) {
        const filePath = path.join(this.projectRoot, fileName);
        if (await fs.pathExists(filePath)) {
          try {
            await fs.copy(filePath, path.join(filesDir, fileName), { preserveTimestamps: true });
            filesBackedUp.push(filePath);
          } catch (error) {
            // Not critical
          }
        }
      }

      return {
        success: true,
        message: `Backed up ${filesBackedUp.length} files`,
        metadata: { files: filesBackedUp }
      };
    } catch (error) {
      return { success: false, error: error.message };
    }
  }

  /**
   * Backup session state and context
   */
  async _backupSessionState(backupPath, sessionState) {
    try {
      const stateDir = path.join(backupPath, 'state');

      // Save session state
      await fs.writeJson(path.join(stateDir, 'session.json'), sessionState, { spaces: 2 });

      // Copy hook logs if they exist
      const logsSource = path.join(this.projectRoot, '.claude_hooks', 'logs');
      if (await fs.pathExists(logsSource)) {
        await fs.copy(logsSource, path.join(backupPath, 'logs'));
      }

      // Copy patterns database
      const patternsSource = path.join(this.projectRoot, '.claude_hooks', 'patterns');
      if (await fs.pathExists(patternsSource)) {
        await fs.copy(patternsSource, path.join(stateDir, 'patterns'));
      }

      return { success: true, message: 'Session state backed up' };
    } catch (error) {
      return { success: false, error: error.message };
    }
  }

  /**
   * Create minimal backup when full backup fails
   */
  async _createMinimalBackup(sessionState) {
    try {
      // At minimum, save the session state to a simple file. This overwrites
      // any previous emergency backup, so only the latest failure is kept.
      const emergencyFile = path.join(this.backupDir, 'emergency_backup.json');

      const emergencyData = {
        timestamp: new Date().toISOString(),
        session_state: sessionState,
        type: 'emergency_backup'
      };

      await fs.writeJson(emergencyFile, emergencyData, { spaces: 2 });
      return true;
    } catch (error) {
      return false;
    }
  }

  /**
   * Remove old backups to save space
   */
  async _cleanupOldBackups() {
    try {
      // Get all backup directories
      const entries = await fs.readdir(this.backupDir, { withFileTypes: true });
      const backupDirs = entries
        .filter(entry => entry.isDirectory() && entry.name.startsWith('backup_'))
        .map(entry => ({
          name: entry.name,
          path: path.join(this.backupDir, entry.name)
        }));

      // Sort by modification time (newest first)
      const backupsWithStats = await Promise.all(
        backupDirs.map(async (backup) => {
          const stats = await fs.stat(backup.path);
          return { ...backup, mtime: stats.mtime };
        })
      );

      backupsWithStats.sort((a, b) => b.mtime - a.mtime);

      // Remove old backups beyond maxBackups
      const oldBackups = backupsWithStats.slice(this.maxBackups);
      for (const oldBackup of oldBackups) {
        await fs.remove(oldBackup.path);
      }
    } catch (error) {
      // Cleanup failures shouldn't break backup
    }
  }
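
  // Retention: only the `maxBackups` most recently modified backup_*
  // directories survive cleanup; backup.log and emergency_backup.json are
  // plain files and are never removed here.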

  /**
   * Log backup operation
   */
  async _logBackup(backupId, decision, success, error = '') {
    try {
      const logEntry = {
        timestamp: new Date().toISOString(),
        backup_id: backupId,
        reason: decision.reason,
        urgency: decision.urgency,
        success,
        error
      };

      // Append to log file
      await fs.appendFile(this.logFile, JSON.stringify(logEntry) + '\n');
    } catch (error) {
      // Logging failures shouldn't break backup
    }
  }
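
  // backup.log is JSON Lines (one object per line), e.g.:
  //   {"timestamp":"...","backup_id":"backup_...","reason":"manual","urgency":"medium","success":true,"error":""}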

  /**
   * Log file backup errors
   */
  async _logFileBackupError(filePath, error) {
    try {
      const errorEntry = {
        timestamp: new Date().toISOString(),
        type: 'file_backup_error',
        file_path: filePath,
        error: error.message
      };

      await fs.appendFile(this.logFile, JSON.stringify(errorEntry) + '\n');
    } catch (logError) {
      // Ignore logging errors (named to avoid shadowing the `error` being logged)
    }
  }

  /**
   * List available backups
   */
  async listBackups() {
    const backups = [];

    try {
      const entries = await fs.readdir(this.backupDir, { withFileTypes: true });
      const backupDirs = entries
        .filter(entry => entry.isDirectory() && entry.name.startsWith('backup_'))
        .map(entry => path.join(this.backupDir, entry.name));

      for (const backupDir of backupDirs) {
        const metadataFile = path.join(backupDir, 'metadata.json');
        if (await fs.pathExists(metadataFile)) {
          try {
            backups.push(await fs.readJson(metadataFile));
          } catch (error) {
            // Skip corrupted metadata
          }
        }
      }
    } catch (error) {
      // Return empty list on error
    }

    // Newest first
    return backups.sort((a, b) => {
      const timeA = a.timestamp || '';
      const timeB = b.timestamp || '';
      return timeB.localeCompare(timeA);
    });
  }
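
  // ISO-8601 timestamps sort lexicographically, so comparing the raw strings
  // above yields newest-first ordering without parsing dates.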

  /**
   * Get list of files in backup
   */
  async _getBackupFiles(filesDir, relativeTo = filesDir) {
    const files = [];

    try {
      const entries = await fs.readdir(filesDir, { withFileTypes: true });

      for (const entry of entries) {
        const fullPath = path.join(filesDir, entry.name);

        if (entry.isDirectory()) {
          // Recurse, keeping paths relative to the original root
          const subFiles = await this._getBackupFiles(fullPath, relativeTo);
          files.push(...subFiles);
        } else {
          files.push(path.relative(relativeTo, fullPath));
        }
      }
    } catch (error) {
      // Return what we have
    }

    return files;
  }
}

module.exports = { BackupManager };
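
// Minimal usage sketch (illustrative only; the context keys mirror what
// createBackup normalizes above, and running it requires fs-extra installed):
if (require.main === module) {
  (async () => {
    const manager = new BackupManager('.');
    const backupId = await manager.createBackup('.', {
      trigger: 'manual_demo',
      modified_files: []
    });
    console.log('Backup:', backupId || 'failed (see backup.log)');

    const backups = await manager.listBackups();
    console.log(`Available backups: ${backups.length}`);
  })();
}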