Automatically backs up changed files to cloud storage when session ends
Hook script location: .claude/hooks/ or ~/.claude/hooks/

Hook configuration (.claude/config.json):

{
  "hooks": {
    "stop": {
      "script": "./.claude/hooks/cloud-backup-on-session-stop.sh"
    }
  }
}

cloud-backup-on-session-stop.sh:

#!/usr/bin/env bash
echo "☁️ Starting cloud backup process..." >&2
# Generate timestamped backup directory name
BACKUP_DIR="claude-backup-$(date +%Y%m%d_%H%M%S)"
# Get list of modified files
MODIFIED_FILES=$(git diff --name-only 2>/dev/null)
if [ -z "$MODIFIED_FILES" ]; then
echo "📂 No modified files to backup" >&2
exit 0
fi
echo "📦 Found modified files to backup" >&2
# Try AWS S3 first
if command -v aws >/dev/null 2>&1 && [[ -n "$AWS_BACKUP_BUCKET" ]]; then
echo "📦 Backing up to AWS S3..." >&2
if echo "$MODIFIED_FILES" | tar -czf - -T - | aws s3 cp - "s3://$AWS_BACKUP_BUCKET/$BACKUP_DIR.tar.gz"; then
echo "✅ Successfully backed up to S3: $AWS_BACKUP_BUCKET/$BACKUP_DIR.tar.gz" >&2
exit 0
else
echo "❌ AWS S3 backup failed" >&2
fi
fi
# Try Google Cloud Storage
if command -v gcloud >/dev/null 2>&1 && [[ -n "$GCS_BACKUP_BUCKET" ]]; then
echo "📦 Backing up to Google Cloud Storage..." >&2
if echo "$MODIFIED_FILES" | tar -czf - -T - | gsutil cp - "gs://$GCS_BACKUP_BUCKET/$BACKUP_DIR.tar.gz"; then
echo "✅ Successfully backed up to GCS: $GCS_BACKUP_BUCKET/$BACKUP_DIR.tar.gz" >&2
exit 0
else
echo "❌ Google Cloud backup failed" >&2
fi
fi
# Try rclone as universal fallback
if command -v rclone >/dev/null 2>&1; then
echo "📦 Backing up using rclone..." >&2
TEMP_BACKUP="/tmp/$BACKUP_DIR.tar.gz"
if echo "$MODIFIED_FILES" | tar -czf "$TEMP_BACKUP" -T - && rclone copy "$TEMP_BACKUP" remote:backups/; then
echo "✅ Successfully backed up using rclone" >&2
rm -f "$TEMP_BACKUP"
exit 0
else
echo "❌ rclone backup failed" >&2
rm -f "$TEMP_BACKUP"
fi
fi
echo "⚠️ No cloud storage provider configured or available" >&2
echo "💡 Configure AWS_BACKUP_BUCKET, GCS_BACKUP_BUCKET, or rclone to enable cloud backup" >&2
exit 1Stop hook not triggering when Claude session ends
Verify the hook script is executable (chmod +x) and registered in .claude/config.json, and check that the path in the config matches the script's location on disk. Also make sure the session ends cleanly rather than being force-quit.
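A quick way to check all three conditions from the project root (paths assumed to match the configuration above):

# Make the hook script executable
chmod +x .claude/hooks/cloud-backup-on-session-stop.sh

# Confirm the registered path matches the file on disk
cat .claude/config.json
ls -l .claude/hooks/cloud-backup-on-session-stop.sh

# Run the script by hand to rule out errors in the script itself
./.claude/hooks/cloud-backup-on-session-stop.sh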
AWS S3 backup fails with permission denied error
Set the AWS_BACKUP_BUCKET environment variable (for example in a .env file), verify your AWS credentials with aws s3 ls, and check that the IAM policy allows the s3:PutObject action on the target bucket.
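A minimal sanity check, assuming the bucket name below is replaced with your own:

# Export the bucket the hook expects (placeholder name)
export AWS_BACKUP_BUCKET=my-backup-bucket

# Verify credentials and bucket access
aws sts get-caller-identity
aws s3 ls "s3://$AWS_BACKUP_BUCKET"

# Test a streamed upload, the same way the hook uploads its archive
echo "test" | aws s3 cp - "s3://$AWS_BACKUP_BUCKET/claude-backup-test.txt"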
Git diff returns no modified files despite changes
Ensure the files are tracked by git: git diff --name-only only lists tracked files with unstaged modifications, so brand-new untracked files will not appear until they have been committed at least once. Run git status to verify that changes exist, and check that the hook runs after file operations complete, not while they are still in progress.
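To preview what the hook would actually archive, run the same command it uses:

# Files the hook backs up: tracked files with unstaged modifications
git diff --name-only

# Full picture of the working tree, including staged and untracked files
git status --short

# Staged plus unstaged changes relative to the last commit (broader than what the hook picks up)
git diff HEAD --name-only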
Backup archive creation hangs on large file sets
Use .gitignore to keep node_modules and build artifacts out of git tracking, consider filtering out large files in the hook script, and wrap the tar command in timeout(1) so a huge archive cannot hang the session, since tar itself has no timeout option.
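One possible sketch of both ideas inside the hook; the 10 MB cutoff, the 120-second limit, and the SMALL_FILES variable are arbitrary choices for illustration, not part of the published script:

# Keep only modified files smaller than ~10 MB
SMALL_FILES=$(git diff --name-only | while read -r f; do
  [ -f "$f" ] && [ "$(wc -c < "$f")" -lt 10485760 ] && echo "$f"
done)

# Bound the archive step so a huge file set cannot hang the session
if [ -n "$SMALL_FILES" ]; then
  echo "$SMALL_FILES" | timeout 120 tar -czf "/tmp/$BACKUP_DIR-small.tar.gz" -T -
fi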
Multiple cloud providers configured but rclone used
The hook tries AWS S3 first, then Google Cloud Storage, then rclone. If rclone is being used unexpectedly, check that AWS_BACKUP_BUCKET or GCS_BACKUP_BUCKET is actually set and that the aws or gcloud CLI is in PATH and authenticated.
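To see which branch the hook will take, check the CLIs and variables it tests, in the same order:

# Which CLI tools are on PATH?
command -v aws gcloud rclone

# Which bucket variables are set? ("unset" means that provider is skipped)
echo "AWS: ${AWS_BACKUP_BUCKET:-unset}  GCS: ${GCS_BACKUP_BUCKET:-unset}"

# Confirm authentication for the provider you expect to win
aws sts get-caller-identity
gcloud auth list
rclone listremotes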