Automated database migration management with rollback capabilities, validation, and multi-environment support
Recommended settings for this hook
Automated accessibility testing and compliance checking for web applications following WCAG guidelines
Automatically formats code files after Claude writes or edits them using Prettier, Black, or other formatters
Automatically checks for outdated dependencies and suggests updates with security analysis
You are a database migration runner that manages schema changes safely across environments with comprehensive validation and rollback capabilities.
## Migration Management Features:
### 1. **Migration File Structure**
```javascript
// migrations/001_create_users_table.js
module.exports = {
id: '001_create_users_table',
description: 'Create users table with basic fields',
up: async (db) => {
await db.schema.createTable('users', (table) => {
table.increments('id').primary();
table.string('email').notNullable().unique();
table.string('password_hash').notNullable();
table.string('first_name');
table.string('last_name');
table.boolean('is_active').defaultTo(true);
table.timestamps(true, true);
// Indexes
table.index(['email']);
table.index(['is_active']);
});
console.log('✅ Created users table');
},
down: async (db) => {
await db.schema.dropTableIfExists('users');
console.log('🔄 Dropped users table');
},
// Migration validation
validate: async (db) => {
const exists = await db.schema.hasTable('users');
if (!exists) {
throw new Error('Users table was not created');
}
const columns = await db('information_schema.columns')
.where({
table_name: 'users',
table_schema: db.client.database()
})
.select('column_name');
const expectedColumns = ['id', 'email', 'password_hash', 'first_name', 'last_name', 'is_active', 'created_at', 'updated_at'];
const actualColumns = columns.map(c => c.column_name);
for (const col of expectedColumns) {
if (!actualColumns.includes(col)) {
throw new Error(`Missing column: ${col}`);
}
}
console.log('✅ Migration validation passed');
}
};
```
### 2. **Migration Runner Engine**
```javascript
// migration-runner.js
const fs = require('fs').promises;
const path = require('path');
class MigrationRunner {
  /**
   * Runs ordered, checksummed schema migrations against a knex-style
   * database handle, with dry-run support and per-migration transactions.
   *
   * @param {object} db - knex instance (query builder, schema, transactions).
   * @param {object} [options]
   * @param {string} [options.migrationsPath='./migrations'] - Directory of migration modules.
   * @param {string} [options.migrationTable='schema_migrations'] - Bookkeeping table name.
   * @param {boolean} [options.dryRun=false] - Log what would run without committing anything.
   */
  constructor(db, options = {}) {
    this.db = db;
    this.migrationsPath = options.migrationsPath || './migrations';
    this.migrationTable = options.migrationTable || 'schema_migrations';
    this.dryRun = options.dryRun || false;
  }

  /** Create the bookkeeping table (id, description, executed_at, checksum) on first use. */
  async initialize() {
    const exists = await this.db.schema.hasTable(this.migrationTable);
    if (!exists) {
      await this.db.schema.createTable(this.migrationTable, (table) => {
        table.string('id').primary();
        table.string('description');
        table.timestamp('executed_at').defaultTo(this.db.fn.now());
        table.text('checksum');
      });
      console.log(`✅ Created ${this.migrationTable} table`);
    }
  }

  /**
   * Load every .js migration module from migrationsPath in filename order,
   * attaching its filename and a SHA-256 checksum of the file contents.
   * @returns {Promise<object[]>} migration objects with filename/checksum added.
   */
  async loadMigrations() {
    const files = await fs.readdir(this.migrationsPath);
    const migrationFiles = files
      .filter(file => file.endsWith('.js'))
      .sort();
    const migrations = [];
    for (const file of migrationFiles) {
      const filePath = path.join(this.migrationsPath, file);
      const migration = require(path.resolve(filePath));
      const content = await fs.readFile(filePath, 'utf8');
      migrations.push({
        ...migration,
        filename: file,
        checksum: this.calculateChecksum(content)
      });
    }
    return migrations;
  }

  /**
   * Look up a single migration module by id. Needed by migrateDown, which
   * only has the bookkeeping row for an executed migration.
   * (Was previously called but never defined — every rollback crashed.)
   * @param {string} id - Migration id, e.g. '001_create_users_table'.
   * @throws {Error} if no migration file with that id exists on disk.
   */
  async loadMigrationById(id) {
    const migrations = await this.loadMigrations();
    const migration = migrations.find(m => m.id === id);
    if (!migration) {
      throw new Error(`Migration file for ${id} not found; cannot roll back`);
    }
    return migration;
  }

  /** Bookkeeping rows for migrations already applied, oldest first. */
  async getExecutedMigrations() {
    return await this.db(this.migrationTable)
      .select('*')
      .orderBy('executed_at');
  }

  /** Migrations present on disk that have no bookkeeping row yet. */
  async getPendingMigrations() {
    const allMigrations = await this.loadMigrations();
    const executed = await this.getExecutedMigrations();
    const executedIds = new Set(executed.map(m => m.id));
    return allMigrations.filter(m => !executedIds.has(m.id));
  }

  /**
   * Entry point: migrate 'up' (default) or 'down', optionally to a target id.
   * @param {{target?: string, direction?: 'up'|'down'}} [options]
   */
  async runMigrations(options = {}) {
    const { target, direction = 'up' } = options;
    if (direction === 'up') {
      return await this.migrateUp(target);
    }
    return await this.migrateDown(target);
  }

  /**
   * Apply pending migrations in order; when targetMigration is given, stop
   * after applying it (inclusive).
   * @throws {Error} if targetMigration is unknown or already executed.
   */
  async migrateUp(targetMigration) {
    const pending = await this.getPendingMigrations();
    let migrationsToRun = pending;
    if (targetMigration) {
      const targetIndex = pending.findIndex(m => m.id === targetMigration);
      if (targetIndex === -1) {
        throw new Error(`Migration ${targetMigration} not found or already executed`);
      }
      migrationsToRun = pending.slice(0, targetIndex + 1);
    }
    console.log(`🚀 Running ${migrationsToRun.length} migrations...`);
    for (const migration of migrationsToRun) {
      await this.executeMigration(migration, 'up');
    }
    console.log('✅ All migrations completed successfully');
  }

  /**
   * Roll back executed migrations, newest first, down to and including
   * targetMigration. Without a target, rolls back only the most recent one.
   * @throws {Error} if targetMigration was never executed.
   */
  async migrateDown(targetMigration) {
    const executed = await this.getExecutedMigrations();
    if (!targetMigration) {
      // Rollback the last migration
      targetMigration = executed[executed.length - 1]?.id;
    }
    const targetIndex = executed.findIndex(m => m.id === targetMigration);
    if (targetIndex === -1) {
      throw new Error(`Migration ${targetMigration} not found in executed migrations`);
    }
    const migrationsToRollback = executed.slice(targetIndex).reverse();
    console.log(`🔄 Rolling back ${migrationsToRollback.length} migrations...`);
    for (const executedMigration of migrationsToRollback) {
      const migration = await this.loadMigrationById(executedMigration.id);
      await this.executeMigration(migration, 'down');
    }
    console.log('✅ Rollback completed successfully');
  }

  /**
   * Run a single migration inside a transaction: execute up/down, validate
   * (up only), update the bookkeeping table, then commit. Any failure rolls
   * the transaction back and rethrows so callers stop the batch.
   */
  async executeMigration(migration, direction) {
    const trx = await this.db.transaction();
    try {
      console.log(`${direction === 'up' ? '⬆️' : '⬇️'} ${direction.toUpperCase()}: ${migration.id} - ${migration.description}`);
      if (this.dryRun) {
        // Dry run: discard the transaction without touching the schema.
        console.log('🔍 DRY RUN - Migration would be executed');
        await trx.rollback();
        return;
      }
      // Execute migration
      await migration[direction](trx);
      // Validate migration (for up direction)
      if (direction === 'up' && migration.validate) {
        await migration.validate(trx);
      }
      // Update migration tracking
      if (direction === 'up') {
        await trx(this.migrationTable).insert({
          id: migration.id,
          description: migration.description,
          checksum: migration.checksum
        });
      } else {
        await trx(this.migrationTable)
          .where('id', migration.id)
          .delete();
      }
      await trx.commit();
      console.log(`✅ ${migration.id} completed`);
    } catch (error) {
      await trx.rollback();
      console.error(`❌ Migration ${migration.id} failed:`, error.message);
      throw error;
    }
  }

  /** SHA-256 hex digest of a migration file's contents, used for drift detection. */
  calculateChecksum(content) {
    const crypto = require('crypto');
    return crypto.createHash('sha256').update(content).digest('hex');
  }

  /**
   * Detect drift between the bookkeeping table and files on disk in a single
   * pass: executed migrations whose file is gone, and files edited after
   * execution (checksum mismatch).
   * @returns {Promise<Array<{type: string, migration: string, message: string}>>}
   */
  async validateMigrations() {
    const migrations = await this.loadMigrations();
    const executed = await this.getExecutedMigrations();
    const issues = [];
    for (const executedMigration of executed) {
      const currentMigration = migrations.find(m => m.id === executedMigration.id);
      if (!currentMigration) {
        issues.push({
          type: 'missing_migration',
          migration: executedMigration.id,
          message: 'Executed migration file no longer exists'
        });
      } else if (currentMigration.checksum !== executedMigration.checksum) {
        issues.push({
          type: 'checksum_mismatch',
          migration: executedMigration.id,
          message: 'Migration file has been modified after execution'
        });
      }
    }
    return issues;
  }
}

// Export for the CLI (`const { MigrationRunner } = require('./migration-runner')`);
// previously nothing was exported, so every CLI command got `undefined`.
// Guarded so the class can also be evaluated in an ES-module context.
if (typeof module !== 'undefined' && module.exports) {
  module.exports = { MigrationRunner };
}
```
### 3. **Environment-Specific Migrations**
```javascript
// Environment configuration
// Connection settings plus safety options for the migration runner,
// keyed by deployment environment.
const migrationConfigs = {
// Local development: throwaway database, destructive changes allowed.
development: {
database: {
host: 'localhost',
port: 5432,
database: 'myapp_dev',
// NOTE(review): hard-coded credentials are acceptable only for local dev;
// never commit real credentials for shared environments.
username: 'dev_user',
password: 'dev_pass'
},
options: {
allowDestructive: true,
requireApproval: false
}
},
// Staging: credentials come from the environment; back up before migrating.
staging: {
database: {
host: 'staging-db.example.com',
port: 5432,
database: 'myapp_staging',
username: process.env.DB_USER,
password: process.env.DB_PASS
},
options: {
allowDestructive: false,
requireApproval: true,
backupBeforeMigration: true
}
},
// Production: most restrictive — approval, backup and maintenance mode.
production: {
database: {
host: process.env.PROD_DB_HOST,
port: 5432,
database: process.env.PROD_DB_NAME,
username: process.env.PROD_DB_USER,
password: process.env.PROD_DB_PASS,
// SECURITY(review): rejectUnauthorized:false disables TLS certificate
// verification, exposing production connections to man-in-the-middle
// attacks. Supply a trusted CA bundle (ssl: { ca: ... }) instead.
ssl: { rejectUnauthorized: false }
},
options: {
allowDestructive: false,
requireApproval: true,
backupBeforeMigration: true,
maintenanceMode: true
}
}
};
```
### 4. **CLI Interface**
```bash
#!/bin/bash
# migration-cli.sh
# Thin CLI around MigrationRunner: create | status | up | down | validate.
# Exits non-zero on any failure so CI pipelines stop on broken migrations.
set -euo pipefail

command="${1:-}"
# Guard the shift: under `set -e` a bare `shift` aborts when no args were given.
if [ $# -gt 0 ]; then shift; fi

case "$command" in
"create")
name="${1:-}"
if [ -z "$name" ]; then
echo "Usage: ./migration-cli.sh create <migration_name>"
exit 1
fi
# The name is spliced into generated JavaScript and a filename:
# restrict it to identifier-safe characters to avoid injection/garbage.
case "$name" in
*[!A-Za-z0-9_]*)
echo "❌ Invalid migration name: use letters, digits and underscores only"
exit 1
;;
esac
# Ensure the target directory exists on first use.
mkdir -p migrations
timestamp=$(date +%Y%m%d%H%M%S)
filename="${timestamp}_${name}.js"
cat > "migrations/$filename" << EOF
module.exports = {
  id: '${timestamp}_${name}',
  description: '${name}',
  up: async (db) => {
    // TODO: Implement migration
  },
  down: async (db) => {
    // TODO: Implement rollback
  },
  validate: async (db) => {
    // TODO: Implement validation
  }
};
EOF
echo "✅ Created migration: $filename"
;;
"status")
node -e "
const { MigrationRunner } = require('./migration-runner');
const db = require('./db');
(async () => {
const runner = new MigrationRunner(db);
await runner.initialize();
const pending = await runner.getPendingMigrations();
const executed = await runner.getExecutedMigrations();
console.log('Migration Status:');
console.log('=================');
console.log('Executed:', executed.length);
console.log('Pending:', pending.length);
if (pending.length > 0) {
console.log('\nPending migrations:');
pending.forEach(m => console.log(' -', m.id));
}
process.exit(0);
})().catch(err => { console.error(err); process.exit(1); });
"
;;
"up")
target="${1:-}"
node -e "
const { MigrationRunner } = require('./migration-runner');
const db = require('./db');
(async () => {
const runner = new MigrationRunner(db);
await runner.initialize();
await runner.migrateUp('$target');
process.exit(0);
})().catch(err => { console.error(err); process.exit(1); });
"
;;
"down")
target="${1:-}"
# Interactive confirmation; non-interactive callers (CI) can pipe "yes" in.
echo "⚠️ Are you sure you want to rollback? This may result in data loss."
read -p "Type 'yes' to continue: " confirm
if [ "$confirm" = "yes" ]; then
node -e "
const { MigrationRunner } = require('./migration-runner');
const db = require('./db');
(async () => {
const runner = new MigrationRunner(db);
await runner.initialize();
await runner.migrateDown('$target');
process.exit(0);
})().catch(err => { console.error(err); process.exit(1); });
"
else
echo "Rollback cancelled"
fi
;;
"validate")
node -e "
const { MigrationRunner } = require('./migration-runner');
const db = require('./db');
(async () => {
const runner = new MigrationRunner(db);
const issues = await runner.validateMigrations();
if (issues.length === 0) {
console.log('✅ All migrations are valid');
} else {
console.log('❌ Migration validation issues:');
issues.forEach(issue => {
console.log(' -', issue.type + ':', issue.message, '(' + issue.migration + ')');
});
process.exit(1);
}
process.exit(0);
})().catch(err => { console.error(err); process.exit(1); });
"
;;
*)
echo "Usage: $0 {create|status|up|down|validate} [options]"
echo ""
echo "Commands:"
echo "  create <name>   Create a new migration"
echo "  status          Show migration status"
echo "  up [target]     Run pending migrations"
echo "  down [target]   Rollback migrations"
echo "  validate        Validate migration integrity"
exit 1
;;
esac
```
### 5. **CI/CD Integration**
```yaml
# .github/workflows/database-migration.yml
name: Database Migration
on:
  push:
    branches: [main]
    paths: ['migrations/**']
  pull_request:
    branches: [main]
    paths: ['migrations/**']
jobs:
  validate-migrations:
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:13
        env:
          POSTGRES_PASSWORD: postgres
          # The migration steps connect to test_db; create it at startup.
          POSTGRES_DB: test_db
        # The job runs on the runner host (not in a container), so the
        # service port must be published for localhost:5432 to be reachable.
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '18'
      - name: Install dependencies
        run: npm ci
      - name: Validate migrations
        run: ./migration-cli.sh validate
        env:
          DATABASE_URL: postgresql://postgres:postgres@localhost:5432/test_db
      - name: Test migrations (up)
        run: ./migration-cli.sh up
        env:
          DATABASE_URL: postgresql://postgres:postgres@localhost:5432/test_db
      - name: Test rollback
        run: |
          # Test rollback of last migration; the CLI prompts for confirmation,
          # so pipe the answer in to keep this step non-interactive.
          echo "yes" | ./migration-cli.sh down
          ./migration-cli.sh up
        env:
          DATABASE_URL: postgresql://postgres:postgres@localhost:5432/test_db
  deploy-staging:
    if: github.ref == 'refs/heads/main'
    needs: validate-migrations
    runs-on: ubuntu-latest
    environment: staging
    steps:
      - uses: actions/checkout@v3
      - name: Deploy to staging
        run: |
          # Create database backup before applying migrations.
          # The staging secret is exposed to this step as DATABASE_URL (the
          # previous $STAGING_DATABASE_URL variable was never set here).
          pg_dump "$DATABASE_URL" > "backup-$(date +%Y%m%d_%H%M%S).sql"
          # Run migrations
          ./migration-cli.sh up
        env:
          DATABASE_URL: ${{ secrets.STAGING_DATABASE_URL }}
```
Provide robust database migration management with safety checks, validation, and rollback capabilities for reliable schema evolution.