Files
cmdb-insight/backend/scripts/migrate-sqlite-to-postgres.ts
Bert Hausmans a7f8301196 Add database adapter system, production deployment configs, and new dashboard components
- Add PostgreSQL and SQLite database adapters with factory pattern
- Add migration script for SQLite to PostgreSQL
- Add production Dockerfiles and docker-compose configs
- Add deployment documentation and scripts
- Add BIA sync dashboard and matching service
- Add data completeness configuration and components
- Add new dashboard components (BusinessImportanceComparison, ComplexityDynamics, etc.)
- Update various services and routes
- Remove deprecated management-parameters.json and taxonomy files
2026-01-14 00:38:40 +01:00

185 lines
5.7 KiB
TypeScript

/**
* Migration script: SQLite to PostgreSQL
*
* Migrates data from SQLite databases to PostgreSQL.
*
* Usage:
* DATABASE_URL=postgresql://user:pass@host:port/db tsx scripts/migrate-sqlite-to-postgres.ts
*/
import Database from 'better-sqlite3';
import { Pool } from 'pg';
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
import * as fs from 'fs';
// ESM modules have no __filename/__dirname; derive them from import.meta.url.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Locations of the legacy SQLite databases, resolved relative to this script
// (backend/scripts/ -> ../../data/).
const SQLITE_CACHE_DB = join(__dirname, '../../data/cmdb-cache.db');
const SQLITE_CLASSIFICATIONS_DB = join(__dirname, '../../data/classifications.db');
/**
 * Entry point: validates configuration, connects to PostgreSQL, and migrates
 * whichever of the two SQLite databases exist on disk.
 *
 * On failure the exit code is set via `process.exitCode` rather than calling
 * `process.exit(1)` inside the try/catch — `process.exit()` terminates the
 * process immediately, which would skip the `finally` block, leak the pool,
 * and potentially truncate pending console output.
 */
async function migrate(): Promise<void> {
  const databaseUrl = process.env.DATABASE_URL;
  if (!databaseUrl) {
    console.error('Error: DATABASE_URL environment variable is required');
    console.error('Example: DATABASE_URL=postgresql://user:pass@localhost:5432/cmdb');
    // Immediate exit is safe here: no resources have been acquired yet.
    process.exit(1);
  }
  console.log('Starting migration from SQLite to PostgreSQL...');
  // Mask the password segment of the connection string before logging it.
  console.log(`PostgreSQL: ${databaseUrl.replace(/:[^:@]+@/, ':****@')}`);
  const pg = new Pool({ connectionString: databaseUrl });
  try {
    // Fail fast if the target database is unreachable.
    await pg.query('SELECT 1');
    console.log('✓ PostgreSQL connection successful');
    // Each SQLite source file is optional; skip gracefully when absent.
    if (fs.existsSync(SQLITE_CACHE_DB)) {
      console.log('\nMigrating cache database...');
      await migrateCacheDatabase(pg);
    } else {
      console.log('\n⚠ Cache database not found, skipping...');
    }
    if (fs.existsSync(SQLITE_CLASSIFICATIONS_DB)) {
      console.log('\nMigrating classifications database...');
      await migrateClassificationsDatabase(pg);
    } else {
      console.log('\n⚠ Classifications database not found, skipping...');
    }
    console.log('\n✓ Migration completed successfully!');
  } catch (error) {
    console.error('\n✗ Migration failed:', error);
    // FIX: was `process.exit(1)`, which bypassed the `finally` cleanup below.
    process.exitCode = 1;
  } finally {
    await pg.end();
  }
}
/**
 * Copies the three cache tables (cached_objects, object_relations,
 * sync_metadata) from the SQLite cache database into PostgreSQL,
 * upserting row by row so the migration is re-runnable.
 */
async function migrateCacheDatabase(pg: Pool) {
  // Open the SQLite file read-only: it is a source only, never modified.
  const source = new Database(SQLITE_CACHE_DB, { readonly: true });
  try {
    // --- cached_objects -------------------------------------------------
    const cachedRows = source.prepare('SELECT * FROM cached_objects').all() as any[];
    console.log(` Migrating ${cachedRows.length} cached objects...`);
    for (const row of cachedRows) {
      // NOTE: jira_created_at and object_type are intentionally absent from
      // the DO UPDATE set — only the mutable columns are refreshed on conflict.
      await pg.query(
        `INSERT INTO cached_objects (id, object_key, object_type, label, data, jira_updated_at, jira_created_at, cached_at)
         VALUES ($1, $2, $3, $4, $5::jsonb, $6, $7, $8)
         ON CONFLICT (id) DO UPDATE SET
           object_key = EXCLUDED.object_key,
           label = EXCLUDED.label,
           data = EXCLUDED.data,
           jira_updated_at = EXCLUDED.jira_updated_at,
           cached_at = EXCLUDED.cached_at`,
        // row.data is already a JSON string; the ::jsonb cast parses it server-side.
        [row.id, row.object_key, row.object_type, row.label, row.data,
         row.jira_updated_at, row.jira_created_at, row.cached_at]
      );
    }
    // --- object_relations -----------------------------------------------
    const relationRows = source.prepare('SELECT * FROM object_relations').all() as any[];
    console.log(` Migrating ${relationRows.length} relations...`);
    for (const row of relationRows) {
      await pg.query(
        `INSERT INTO object_relations (source_id, target_id, attribute_name, source_type, target_type)
         VALUES ($1, $2, $3, $4, $5)
         ON CONFLICT (source_id, target_id, attribute_name) DO UPDATE SET
           source_type = EXCLUDED.source_type,
           target_type = EXCLUDED.target_type`,
        [row.source_id, row.target_id, row.attribute_name, row.source_type, row.target_type]
      );
    }
    // --- sync_metadata ---------------------------------------------------
    const metadataRows = source.prepare('SELECT * FROM sync_metadata').all() as any[];
    console.log(` Migrating ${metadataRows.length} metadata entries...`);
    for (const row of metadataRows) {
      await pg.query(
        `INSERT INTO sync_metadata (key, value, updated_at)
         VALUES ($1, $2, $3)
         ON CONFLICT (key) DO UPDATE SET
           value = EXCLUDED.value,
           updated_at = EXCLUDED.updated_at`,
        [row.key, row.value, row.updated_at]
      );
    }
    console.log(' ✓ Cache database migrated');
  } finally {
    source.close();
  }
}
/**
 * Copies the classifications database (classification_history, session_state)
 * from SQLite into PostgreSQL. History rows are append-only, so conflicts are
 * ignored; session state is upserted by key.
 */
async function migrateClassificationsDatabase(pg: Pool) {
  // Read-only handle — the SQLite file is never written during migration.
  const source = new Database(SQLITE_CLASSIFICATIONS_DB, { readonly: true });
  try {
    // --- classification_history ------------------------------------------
    const historyRows = source.prepare('SELECT * FROM classification_history').all() as any[];
    console.log(` Migrating ${historyRows.length} classification history entries...`);
    for (const row of historyRows) {
      // ON CONFLICT DO NOTHING: duplicate history entries are simply skipped.
      await pg.query(
        `INSERT INTO classification_history (application_id, application_name, changes, source, timestamp, user_id)
         VALUES ($1, $2, $3, $4, $5, $6)
         ON CONFLICT DO NOTHING`,
        [row.application_id, row.application_name, row.changes,
         row.source, row.timestamp, row.user_id]
      );
    }
    // --- session_state ----------------------------------------------------
    const sessionRows = source.prepare('SELECT * FROM session_state').all() as any[];
    console.log(` Migrating ${sessionRows.length} session state entries...`);
    for (const row of sessionRows) {
      await pg.query(
        `INSERT INTO session_state (key, value, updated_at)
         VALUES ($1, $2, $3)
         ON CONFLICT (key) DO UPDATE SET
           value = EXCLUDED.value,
           updated_at = EXCLUDED.updated_at`,
        [row.key, row.value, row.updated_at]
      );
    }
    console.log(' ✓ Classifications database migrated');
  } finally {
    source.close();
  }
}
// Run migration.
// FIX: a bare `.catch(console.error)` logs an escaped rejection but leaves the
// process exit code at 0, so CI/deploy tooling would treat a failed migration
// as success. Log AND mark the process as failed.
migrate().catch((error: unknown) => {
  console.error(error);
  process.exitCode = 1;
});