import { Pool } from 'pg';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import dotenv from 'dotenv';

dotenv.config();

// Recreate __filename/__dirname, which are not defined in ES modules.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
  ssl: process.env.NODE_ENV === 'production' ? { rejectUnauthorized: false } : false,
});

// Applies any pending .sql files from ../migrations, in lexicographic
// filename order, inside a single transaction. Already-applied files are
// tracked in a `migrations` bookkeeping table and skipped on later runs.
async function runMigrations() {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');

    const migrationsDir = path.join(__dirname, '../migrations');
    const migrationFiles = fs
      .readdirSync(migrationsDir)
      .filter((file) => file.endsWith('.sql'))
      .sort();

    // Bookkeeping table recording which migration files have already run.
    await client.query(`
      CREATE TABLE IF NOT EXISTS migrations (
        id SERIAL PRIMARY KEY,
        name TEXT NOT NULL UNIQUE,
        run_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
      )
    `);

    const { rows: completedMigrations } = await client.query('SELECT name FROM migrations');
    const completedMigrationNames = new Set(completedMigrations.map((m) => m.name));

    for (const file of migrationFiles) {
      if (!completedMigrationNames.has(file)) {
        console.log(`Running migration: ${file}`);
        const sql = fs.readFileSync(path.join(migrationsDir, file), 'utf8');
        await client.query(sql);
        await client.query('INSERT INTO migrations (name) VALUES ($1)', [file]);
        console.log(`Completed migration: ${file}`);
      }
    }

    await client.query('COMMIT');
    console.log('All migrations completed successfully');
  } catch (error) {
    await client.query('ROLLBACK');
    console.error('Migration failed:', error);
    // Set the exit code rather than calling process.exit(1), which would
    // terminate immediately and skip the cleanup in the finally block.
    process.exitCode = 1;
  } finally {
    client.release();
    await pool.end();
  }
}

runMigrations();
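
// A minimal sketch of the migrations directory this runner expects. The
// filenames and SQL below are hypothetical examples, not part of this repo;
// any zero-padded, lexicographically sortable naming scheme works, since
// files are applied in plain .sort() order:
//
//   migrations/
//     001_create_users.sql
//     002_add_email_index.sql
//
// For instance, 001_create_users.sql might contain:
//
//   CREATE TABLE IF NOT EXISTS users (
//     id SERIAL PRIMARY KEY,
//     email TEXT NOT NULL UNIQUE,
//     created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
//   );
//
// Note that all pending files run inside one transaction, so a migration
// containing statements that Postgres cannot run transactionally (e.g.
// CREATE INDEX CONCURRENTLY) would need to be applied separately.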