mirror of
https://github.com/Memo-2023/mana-monorepo.git
synced 2026-05-15 04:41:09 +02:00
🔧 chore(auth): improve migration safety and docker setup
- Add safe-db-push.mjs script for safer database migrations - Update docker-entrypoint.sh with db:push fallback when migrations fail - Add validate-migrations.mjs script for CI migration validation - Update CI workflow to use migration validation - Update drizzle.config.ts with improved configuration
This commit is contained in:
parent
319ccd1a46
commit
9e771c9ae2
7 changed files with 353 additions and 5 deletions
3
.github/workflows/ci.yml
vendored
3
.github/workflows/ci.yml
vendored
|
|
@ -72,6 +72,9 @@ jobs:
|
|||
- name: Type check
|
||||
run: pnpm run type-check
|
||||
|
||||
- name: Validate migrations (no destructive changes)
|
||||
run: node scripts/validate-migrations.mjs
|
||||
|
||||
- name: Lint
|
||||
run: pnpm run lint || echo "Lint warnings found"
|
||||
|
||||
|
|
|
|||
18
pnpm-lock.yaml
generated
18
pnpm-lock.yaml
generated
|
|
@ -4041,6 +4041,24 @@ importers:
|
|||
specifier: ^5.0.0
|
||||
version: 5.9.3
|
||||
|
||||
packages/shared-error-tracking:
|
||||
devDependencies:
|
||||
'@nestjs/common':
|
||||
specifier: ^10.0.0
|
||||
version: 10.4.20(class-transformer@0.5.1)(class-validator@0.14.3)(reflect-metadata@0.2.2)(rxjs@7.8.2)
|
||||
'@nestjs/config':
|
||||
specifier: ^3.0.0
|
||||
version: 3.3.0(@nestjs/common@10.4.20(class-transformer@0.5.1)(class-validator@0.14.3)(reflect-metadata@0.2.2)(rxjs@7.8.2))(rxjs@7.8.2)
|
||||
'@nestjs/core':
|
||||
specifier: ^10.0.0
|
||||
version: 10.4.20(@nestjs/common@10.4.20(class-transformer@0.5.1)(class-validator@0.14.3)(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/platform-express@10.4.20)(@nestjs/websockets@10.4.20)(reflect-metadata@0.2.2)(rxjs@7.8.2)
|
||||
'@types/node':
|
||||
specifier: ^20.0.0
|
||||
version: 20.19.25
|
||||
typescript:
|
||||
specifier: ^5.0.0
|
||||
version: 5.9.3
|
||||
|
||||
packages/shared-errors:
|
||||
devDependencies:
|
||||
'@nestjs/common':
|
||||
|
|
|
|||
204
scripts/validate-migrations.mjs
Normal file
204
scripts/validate-migrations.mjs
Normal file
|
|
@ -0,0 +1,204 @@
|
|||
#!/usr/bin/env node
|
||||
/**
|
||||
* Migration Validation Script
|
||||
*
|
||||
* Scans migration files for destructive SQL patterns and fails CI if found.
|
||||
* This prevents accidental data loss in production deployments.
|
||||
*
|
||||
* Usage:
|
||||
* node scripts/validate-migrations.mjs [--allow-destructive]
|
||||
*
|
||||
* Destructive patterns detected:
|
||||
* - DROP TABLE
|
||||
* - DROP COLUMN
|
||||
* - DROP INDEX (without CONCURRENTLY)
|
||||
* - DROP SCHEMA
|
||||
* - TRUNCATE
|
||||
* - DELETE FROM (without WHERE)
|
||||
*
|
||||
* Safe patterns (allowed):
|
||||
* - DROP ... IF EXISTS (only creates if not exists)
|
||||
* - CREATE ... IF NOT EXISTS
|
||||
* - ALTER TABLE ... ADD COLUMN
|
||||
* - CREATE INDEX CONCURRENTLY
|
||||
*/
|
||||
|
||||
import { readFileSync, readdirSync, existsSync, statSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
// Configuration
|
||||
const MIGRATION_DIRS = [
|
||||
'services/mana-core-auth/src/db/migrations',
|
||||
// Add other services as needed
|
||||
];
|
||||
|
||||
// Destructive patterns to detect
|
||||
const DESTRUCTIVE_PATTERNS = [
|
||||
{
|
||||
pattern: /DROP\s+TABLE(?!\s+IF\s+EXISTS)/gi,
|
||||
message: 'DROP TABLE without IF EXISTS - will fail if table does not exist and is destructive',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
pattern: /DROP\s+TABLE\s+IF\s+EXISTS\s+(?!.*CASCADE\s*;)/gi,
|
||||
message: 'DROP TABLE IF EXISTS - THIS WILL DELETE DATA',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
pattern: /DROP\s+TABLE.*CASCADE/gi,
|
||||
message: 'DROP TABLE CASCADE - THIS WILL DELETE DATA AND DEPENDENT OBJECTS',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
pattern: /ALTER\s+TABLE\s+\S+\s+DROP\s+COLUMN/gi,
|
||||
message: 'DROP COLUMN - THIS WILL DELETE DATA',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
pattern: /DROP\s+SCHEMA(?!\s+IF\s+EXISTS)/gi,
|
||||
message: 'DROP SCHEMA without IF EXISTS',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
pattern: /DROP\s+SCHEMA\s+IF\s+EXISTS.*CASCADE/gi,
|
||||
message: 'DROP SCHEMA CASCADE - THIS WILL DELETE ALL TABLES IN SCHEMA',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
pattern: /TRUNCATE\s+(?:TABLE\s+)?/gi,
|
||||
message: 'TRUNCATE - THIS WILL DELETE ALL DATA IN TABLE',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
pattern: /DELETE\s+FROM\s+\S+\s*(?:;|$)/gim,
|
||||
message: 'DELETE FROM without WHERE clause - THIS WILL DELETE ALL DATA',
|
||||
severity: 'error',
|
||||
},
|
||||
{
|
||||
pattern: /DROP\s+INDEX(?!\s+CONCURRENTLY)/gi,
|
||||
message: 'DROP INDEX without CONCURRENTLY - may cause table locks',
|
||||
severity: 'warning',
|
||||
},
|
||||
];
|
||||
|
||||
// Safe patterns that override destructive checks
|
||||
const SAFE_PATTERNS = [
|
||||
// These patterns indicate safe, idempotent operations
|
||||
/CREATE\s+(?:TABLE|INDEX|SCHEMA|TYPE)\s+IF\s+NOT\s+EXISTS/gi,
|
||||
/DO\s+\$\$.*EXCEPTION.*WHEN\s+duplicate_object/gis, // Safe enum creation pattern
|
||||
];
|
||||
|
||||
function findMigrationFiles(dir) {
|
||||
const files = [];
|
||||
|
||||
if (!existsSync(dir)) {
|
||||
return files;
|
||||
}
|
||||
|
||||
const entries = readdirSync(dir);
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = join(dir, entry);
|
||||
const stat = statSync(fullPath);
|
||||
|
||||
if (stat.isDirectory() && entry !== 'meta') {
|
||||
// Check subdirectories for .sql files
|
||||
files.push(...findMigrationFiles(fullPath));
|
||||
} else if (entry.endsWith('.sql')) {
|
||||
files.push(fullPath);
|
||||
}
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
function validateMigration(filePath) {
|
||||
const content = readFileSync(filePath, 'utf-8');
|
||||
const issues = [];
|
||||
|
||||
// Check if file uses safe patterns throughout (reserved for future use)
|
||||
// const isSafeFile = SAFE_PATTERNS.some((pattern) => pattern.test(content));
|
||||
void SAFE_PATTERNS; // Silence unused warning - patterns reserved for future enhancements
|
||||
|
||||
for (const { pattern, message, severity } of DESTRUCTIVE_PATTERNS) {
|
||||
// Reset regex lastIndex
|
||||
pattern.lastIndex = 0;
|
||||
|
||||
let match;
|
||||
while ((match = pattern.exec(content)) !== null) {
|
||||
// Find line number
|
||||
const beforeMatch = content.substring(0, match.index);
|
||||
const lineNumber = (beforeMatch.match(/\n/g) || []).length + 1;
|
||||
|
||||
issues.push({
|
||||
file: filePath,
|
||||
line: lineNumber,
|
||||
message,
|
||||
severity,
|
||||
match: match[0].trim(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return issues;
|
||||
}
|
||||
|
||||
function main() {
|
||||
const args = process.argv.slice(2);
|
||||
const allowDestructive = args.includes('--allow-destructive');
|
||||
|
||||
console.log('🔍 Validating migration files for destructive patterns...\n');
|
||||
|
||||
const allIssues = [];
|
||||
let filesChecked = 0;
|
||||
|
||||
for (const dir of MIGRATION_DIRS) {
|
||||
const files = findMigrationFiles(dir);
|
||||
filesChecked += files.length;
|
||||
|
||||
for (const file of files) {
|
||||
const issues = validateMigration(file);
|
||||
allIssues.push(...issues);
|
||||
}
|
||||
}
|
||||
|
||||
// Separate errors and warnings
|
||||
const errors = allIssues.filter((i) => i.severity === 'error');
|
||||
const warnings = allIssues.filter((i) => i.severity === 'warning');
|
||||
|
||||
console.log(`📁 Checked ${filesChecked} migration files\n`);
|
||||
|
||||
if (warnings.length > 0) {
|
||||
console.log('⚠️ WARNINGS:\n');
|
||||
for (const issue of warnings) {
|
||||
console.log(` ${issue.file}:${issue.line}`);
|
||||
console.log(` ${issue.message}`);
|
||||
console.log(` Found: ${issue.match}\n`);
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.length > 0) {
|
||||
console.log('❌ DESTRUCTIVE PATTERNS DETECTED:\n');
|
||||
for (const issue of errors) {
|
||||
console.log(` ${issue.file}:${issue.line}`);
|
||||
console.log(` ${issue.message}`);
|
||||
console.log(` Found: ${issue.match}\n`);
|
||||
}
|
||||
|
||||
if (allowDestructive) {
|
||||
console.log('⚠️ --allow-destructive flag set, continuing despite errors\n');
|
||||
console.log('🟡 Migration validation passed with warnings');
|
||||
process.exit(0);
|
||||
} else {
|
||||
console.log('💡 To proceed with destructive migrations, use --allow-destructive flag');
|
||||
console.log(' Or review and update the migration to use safe patterns.\n');
|
||||
console.log('❌ Migration validation FAILED');
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('✅ Migration validation passed - no destructive patterns found');
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
main();
|
||||
|
|
@ -1,9 +1,35 @@
|
|||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
# Skip migrations in Docker - tables are managed via 'pnpm db:push' locally
|
||||
# For fresh databases, run 'pnpm db:push' manually first
|
||||
echo "📋 Skipping migrations (run 'pnpm db:push' locally if needed)"
|
||||
# Run database migrations using proper migration files
|
||||
# This is SAFE - only applies versioned migration files, never drops tables
|
||||
echo "📋 Running database migrations..."
|
||||
|
||||
# Wait for PostgreSQL to be ready (up to 60 seconds)
|
||||
MAX_RETRIES=30
|
||||
RETRY_COUNT=0
|
||||
|
||||
while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
|
||||
# db:migrate uses tsx which needs node, so we run it via pnpm
|
||||
if pnpm db:migrate 2>&1; then
|
||||
echo "✅ Database migrations completed successfully"
|
||||
break
|
||||
else
|
||||
EXIT_CODE=$?
|
||||
RETRY_COUNT=$((RETRY_COUNT + 1))
|
||||
|
||||
# Check if it's a connection error (exit code is typically non-zero for connection issues)
|
||||
if [ $RETRY_COUNT -lt $MAX_RETRIES ]; then
|
||||
echo "⏳ Database not ready or migration in progress, retrying in 2 seconds... ($RETRY_COUNT/$MAX_RETRIES)"
|
||||
sleep 2
|
||||
else
|
||||
echo "❌ Failed to run database migrations after $MAX_RETRIES attempts"
|
||||
echo " Exit code: $EXIT_CODE"
|
||||
echo " Check database connectivity and migration files"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
# Start the application
|
||||
echo "🚀 Starting Mana Core Auth..."
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ export default defineConfig({
|
|||
dbCredentials: {
|
||||
url: process.env.DATABASE_URL || 'postgresql://manacore:devpassword@localhost:5432/manacore',
|
||||
},
|
||||
schemaFilter: ['auth', 'credits', 'referrals', 'public'],
|
||||
schemaFilter: ['auth', 'credits', 'error_logs', 'referrals', 'public'],
|
||||
verbose: true,
|
||||
strict: true,
|
||||
});
|
||||
|
|
|
|||
|
|
@ -15,7 +15,9 @@
|
|||
"test:watch": "jest --watch",
|
||||
"test:cov": "jest --coverage",
|
||||
"test:e2e": "jest --config ./test/jest-e2e.json",
|
||||
"db:push": "drizzle-kit push",
|
||||
"db:push": "node scripts/safe-db-push.mjs",
|
||||
"db:push:force": "node scripts/safe-db-push.mjs --force",
|
||||
"db:push:unsafe": "drizzle-kit push",
|
||||
"db:generate": "drizzle-kit generate",
|
||||
"db:migrate": "tsx src/db/migrate.ts",
|
||||
"db:studio": "drizzle-kit studio"
|
||||
|
|
|
|||
95
services/mana-core-auth/scripts/safe-db-push.mjs
Normal file
95
services/mana-core-auth/scripts/safe-db-push.mjs
Normal file
|
|
@ -0,0 +1,95 @@
|
|||
#!/usr/bin/env node
|
||||
/**
|
||||
* Safe db:push wrapper
|
||||
*
|
||||
* This script wraps drizzle-kit push to prevent accidental execution
|
||||
* in production or staging environments.
|
||||
*
|
||||
* Usage:
|
||||
* pnpm db:push # Uses this wrapper
|
||||
* pnpm db:push:force # Bypass safety check (for emergencies only)
|
||||
*/
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
|
||||
const NODE_ENV = process.env.NODE_ENV || 'development';
|
||||
const DATABASE_URL = process.env.DATABASE_URL || '';
|
||||
|
||||
// Check for production/staging indicators
|
||||
const BLOCKED_ENVS = ['production', 'staging', 'prod', 'stage'];
|
||||
const PROD_HOST_PATTERNS = [
|
||||
/\.railway\.app/,
|
||||
/\.supabase\.co/,
|
||||
/\.neon\.tech/,
|
||||
/\.render\.com/,
|
||||
/staging\./,
|
||||
/prod\./,
|
||||
/production\./,
|
||||
];
|
||||
|
||||
function isProductionEnvironment() {
|
||||
// Check NODE_ENV
|
||||
if (BLOCKED_ENVS.includes(NODE_ENV.toLowerCase())) {
|
||||
return { blocked: true, reason: `NODE_ENV is set to '${NODE_ENV}'` };
|
||||
}
|
||||
|
||||
// Check DATABASE_URL for production patterns
|
||||
for (const pattern of PROD_HOST_PATTERNS) {
|
||||
if (pattern.test(DATABASE_URL)) {
|
||||
return {
|
||||
blocked: true,
|
||||
reason: `DATABASE_URL contains production pattern: ${pattern}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Check for CI/CD environment
|
||||
if (process.env.CI === 'true' || process.env.GITHUB_ACTIONS === 'true') {
|
||||
return { blocked: true, reason: 'Running in CI/CD environment' };
|
||||
}
|
||||
|
||||
return { blocked: false };
|
||||
}
|
||||
|
||||
function main() {
|
||||
const args = process.argv.slice(2);
|
||||
const forceFlag = args.includes('--force') || args.includes('-f');
|
||||
|
||||
console.log('🔒 Safe db:push - Environment Check\n');
|
||||
|
||||
const check = isProductionEnvironment();
|
||||
|
||||
if (check.blocked && !forceFlag) {
|
||||
console.log('❌ BLOCKED: db:push is not allowed in this environment\n');
|
||||
console.log(` Reason: ${check.reason}\n`);
|
||||
console.log(' db:push can cause data loss and should only be used in development.\n');
|
||||
console.log(' For production/staging, use:');
|
||||
console.log(' pnpm db:generate # Generate migration files');
|
||||
console.log(' pnpm db:migrate # Apply migrations safely\n');
|
||||
console.log(' If you absolutely need to run db:push (DANGEROUS):');
|
||||
console.log(' pnpm db:push:force\n');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (check.blocked && forceFlag) {
|
||||
console.log('⚠️ WARNING: --force flag detected, bypassing safety check\n');
|
||||
console.log(` Blocked reason was: ${check.reason}\n`);
|
||||
console.log(' THIS MAY CAUSE DATA LOSS. Proceeding in 5 seconds...\n');
|
||||
|
||||
// Give user time to cancel
|
||||
execSync('sleep 5');
|
||||
}
|
||||
|
||||
console.log('✅ Environment check passed\n');
|
||||
console.log('📤 Running drizzle-kit push...\n');
|
||||
|
||||
try {
|
||||
// Pass through any additional args (except --force)
|
||||
const drizzleArgs = args.filter((arg) => arg !== '--force' && arg !== '-f').join(' ');
|
||||
execSync(`pnpm drizzle-kit push ${drizzleArgs}`, { stdio: 'inherit' });
|
||||
} catch {
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
Loading…
Add table
Add a link
Reference in a new issue