grabbit f557c06771 feat: complete database schema migration to UUID primary keys
## Database Migrations (18 new)
- Migrate all primary keys from SERIAL to UUID
- Add soft delete (deleted_at) to all 19 entities
- Add missing indexes for performance optimization
- Add CHECK constraints for data validation
- Add user audit fields (last_login_at, timezone, locale)
- Add weather station location fields (latitude, longitude, elevation)
- Add foreign key relationships (CameraDevice→Device, ValidatedEvent→WeatherStation)
- Prepare private key encryption fields
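
The migration files themselves are not part of this listing; as a rough sketch, the SERIAL-to-UUID conversion plus soft delete described above could be written as a TypeORM migration like the one below. The table name, class name, and strategy are illustrative only, and any table referenced by foreign keys would additionally need its referencing columns remapped to the new UUIDs.

```ts
import { MigrationInterface, QueryRunner } from 'typeorm';

// Hypothetical sketch for one table with no inbound foreign keys;
// not one of the 18 actual migration files from this commit.
export class UuidAndSoftDelete1734000000000 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    // gen_random_uuid() is built in from PostgreSQL 13; pgcrypto covers older versions
    await queryRunner.query(`CREATE EXTENSION IF NOT EXISTS "pgcrypto"`);
    // Replace the SERIAL primary key with a generated UUID
    await queryRunner.query(`
      ALTER TABLE devices
        ALTER COLUMN id DROP DEFAULT,
        ALTER COLUMN id TYPE uuid USING gen_random_uuid(),
        ALTER COLUMN id SET DEFAULT gen_random_uuid()
    `);
    // Soft delete marker consumed by TypeORM's @DeleteDateColumn
    await queryRunner.query(`ALTER TABLE devices ADD COLUMN deleted_at timestamptz`);
  }

  public async down(): Promise<void> {
    // Regenerated UUIDs cannot be mapped back to the old serial values
    throw new Error('irreversible migration');
  }
}
```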

## Backend Entity Updates
- All entities updated with UUID primary keys
- Added @DeleteDateColumn for soft delete support
- Updated relations and foreign key types
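
In TypeORM terms, a representative entity after these updates might look like the sketch below; the decorators match the bullet points above, but the exact fields are assumptions:

```ts
import {
  Entity, PrimaryGeneratedColumn, Column,
  DeleteDateColumn, ManyToOne, JoinColumn,
} from 'typeorm';
import { Device } from './device.entity';

@Entity('camera_devices')
export class CameraDevice {
  // UUID primary key replaces the old SERIAL integer
  @PrimaryGeneratedColumn('uuid')
  id: string;

  // Soft delete: TypeORM sets this timestamp instead of removing the row
  @DeleteDateColumn({ name: 'deleted_at' })
  deletedAt?: Date;

  // New relationship (CameraDevice -> Device); FK columns are UUID strings now
  @ManyToOne(() => Device)
  @JoinColumn({ name: 'device_id' })
  device: Device;

  @Column({ name: 'device_id' })
  deviceId: string;

  // Stored as a JSONB object, matching the frontend fix below
  @Column({ type: 'jsonb', nullable: true })
  location: { lat: number; lng: number } | null;
}
```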

## Backend Service/Controller Updates
- Changed ID parameters from number to string (UUID)
- Removed ParseIntPipe from controllers
- Updated TypeORM queries for string IDs
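
For NestJS route handlers this mostly means widening the parameter type; whether plain strings or ParseUUIDPipe validation replaced the removed ParseIntPipe is an assumption in this sketch:

```ts
import { Controller, Get, Param, ParseUUIDPipe } from '@nestjs/common';
import { DevicesService } from './devices.service';

@Controller('devices')
export class DevicesController {
  constructor(private readonly devicesService: DevicesService) {}

  // Before: findOne(@Param('id', ParseIntPipe) id: number)
  @Get(':id')
  findOne(@Param('id', ParseUUIDPipe) id: string) {
    // TypeORM lookups now take the UUID string directly
    return this.devicesService.findOne(id);
  }
}
```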

## Frontend Updates
- Updated all service interfaces to use string IDs
- Fixed CameraDevice.location to be typed as a JSONB object
- Updated weather.ts with new fields (elevation, timezone)
- Added Supabase integration hooks and lib
- Fixed chart components for new data structure
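
On the frontend these changes boil down to string IDs and a couple of reshaped fields; a hedged sketch with assumed field names:

```ts
// IDs are UUID strings throughout (previously numbers)
export interface WeatherStation {
  id: string;
  latitude: number;   // new location fields
  longitude: number;
  elevation: number;  // new in weather.ts
  timezone: string;
}

export interface CameraDevice {
  id: string;
  deviceId: string;                               // FK, also a UUID string
  location: { lat: number; lng: number } | null;  // JSONB object, not a string
}
```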

## Cleanup
- Removed deprecated .claude/agents configuration files

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-21 03:33:26 +08:00

The accompanying one-off data-migration script (TypeScript):

import { Client, ClientConfig } from 'pg';

// Connection settings are hardcoded for this one-off migration script;
// environment variables would be preferable for anything reusable.

// Source database configuration
const sourceConfig: ClientConfig = {
  host: '10.85.92.236',
  port: 5433,
  database: 'dev',
  user: 'rabbit',
  password: 'g39j90p11',
};

// Target database configuration (Supabase).
// NOTE: port 6543 is Supabase's transaction-mode pooler, where session-level
// settings such as SET session_replication_role (used below) may not persist
// across queries; the session pooler or a direct connection is safer here.
const targetConfig: ClientConfig = {
  host: 'aws-1-us-east-1.pooler.supabase.com',
  port: 6543,
  database: 'postgres',
  user: 'postgres.ffbgowwvcqmdtvvabmnh',
  password: '!a_KW.-6Grb-X?#',
  ssl: { rejectUnauthorized: false },
};
// Tables in dependency order (tables with no foreign key dependencies first)
const MIGRATION_ORDER = [
  // Layer 1: no foreign key dependencies
  'user_profiles',
  'inventory_devices',
  'subscription_plans',
  'weather_stations',
  'camera_devices',
  'analysis_results', // independent table
  // Layer 2: depends on Layer 1
  'user_identities', // -> user_profiles
  'devices', // -> user_profiles
  'user_subscriptions', // -> user_profiles, subscription_plans
  'weather_observations', // -> weather_stations
  'weather_forecasts', // -> weather_stations
  // Layer 3: depends on Layer 2
  'device_registrations', // -> user_profiles, devices
  'device_certificates', // -> devices
  'device_configurations', // -> devices
  'device_security_events', // -> devices
  'raw_events', // -> devices, user_profiles
  'subscription_history', // -> user_subscriptions
  'payment_records', // -> user_subscriptions
  // Layer 4: depends on Layer 3
  'validated_events', // -> raw_events, devices, user_profiles
];
// Return a table's column names in ordinal position order.
async function getTableColumns(client: Client, tableName: string): Promise<string[]> {
  const result = await client.query(`
    SELECT column_name
    FROM information_schema.columns
    WHERE table_name = $1 AND table_schema = 'public'
    ORDER BY ordinal_position
  `, [tableName]);
  return result.rows.map(row => row.column_name);
}
// Return a table's JSON/JSONB column names (needed for explicit serialization).
async function getJsonColumns(client: Client, tableName: string): Promise<string[]> {
  const result = await client.query(`
    SELECT column_name
    FROM information_schema.columns
    WHERE table_name = $1 AND table_schema = 'public'
      AND data_type IN ('json', 'jsonb')
  `, [tableName]);
  return result.rows.map(row => row.column_name);
}
// Column name mappings: source column -> target column
const COLUMN_MAPPINGS: Record<string, Record<string, string>> = {
  weather_forecasts: {
    weather_station_id: 'station_id',
  },
};
// Copy all rows of one table from source to target, renaming and filtering
// columns so that only columns present in the target schema are inserted.
async function migrateTable(
  sourceClient: Client,
  targetClient: Client,
  tableName: string
): Promise<number> {
  console.log(`\nMigrating table: ${tableName}`);

  // Get source data
  const sourceData = await sourceClient.query(`SELECT * FROM ${tableName}`);
  const rowCount = sourceData.rows.length;
  if (rowCount === 0) {
    console.log(`  No data to migrate`);
    return 0;
  }
  console.log(`  Found ${rowCount} rows`);

  // Get target table columns to ensure we only insert columns that exist
  const targetColumns = await getTableColumns(targetClient, tableName);
  if (targetColumns.length === 0) {
    console.log(`  WARNING: Table ${tableName} does not exist in target database, skipping`);
    return 0;
  }

  // Filter source columns to only those that exist in target (after renaming)
  const columnMapping = COLUMN_MAPPINGS[tableName] || {};
  const sourceColumns = Object.keys(sourceData.rows[0] || {});
  const columnsToMigrate: { source: string; target: string }[] = [];
  for (const sourceCol of sourceColumns) {
    const targetCol = columnMapping[sourceCol] || sourceCol;
    if (targetColumns.includes(targetCol)) {
      columnsToMigrate.push({ source: sourceCol, target: targetCol });
    }
  }
  if (columnsToMigrate.length === 0) {
    console.log(`  WARNING: No common columns found, skipping`);
    return 0;
  }

  // Get JSON/JSONB columns for proper serialization
  const jsonColumns = await getJsonColumns(targetClient, tableName);
  if (jsonColumns.length > 0) {
    console.log(`  JSON columns: ${jsonColumns.join(', ')}`);
  }

  const targetColumnNames = columnsToMigrate.map(c => c.target);
  console.log(`  Migrating columns: ${targetColumnNames.join(', ')}`);

  // Clear existing data in target table (FK triggers are disabled by the caller)
  await targetClient.query(`DELETE FROM ${tableName}`);
  console.log(`  Cleared existing data in target`);

  // Build the insert query with parameterized placeholders once, outside the loop
  const columnList = targetColumnNames.join(', ');
  const placeholders = columnsToMigrate.map((_, i) => `$${i + 1}`).join(', ');
  const insertSql = `INSERT INTO ${tableName} (${columnList}) VALUES (${placeholders})`;

  let insertedCount = 0;
  for (const row of sourceData.rows) {
    // Read from the source column name; serialize JSON column values explicitly
    const values = columnsToMigrate.map(({ source, target }) => {
      const value = row[source];
      if (jsonColumns.includes(target) && value !== null && typeof value === 'object') {
        return JSON.stringify(value);
      }
      return value;
    });
    try {
      await targetClient.query(insertSql, values);
      insertedCount++;
    } catch (error: any) {
      console.error(`  Error inserting row: ${error.message}`);
      console.error(`  Row data: ${JSON.stringify(row)}`);
    }
  }

  console.log(`  Successfully inserted ${insertedCount}/${rowCount} rows`);
  return insertedCount;
}
// Reset any serial-backed sequences so future inserts continue past the
// migrated max value. After the UUID migration most tables no longer have
// sequences, in which case this is a no-op.
async function resetSequences(client: Client, tableName: string) {
  try {
    // Find sequence-backed columns for this table
    const result = await client.query(`
      SELECT column_name, column_default
      FROM information_schema.columns
      WHERE table_name = $1
        AND table_schema = 'public'
        AND column_default LIKE 'nextval%'
    `, [tableName]);

    for (const row of result.rows) {
      const columnName = row.column_name;
      // Get max value and reset sequence
      const maxResult = await client.query(`SELECT MAX(${columnName}) as max_val FROM ${tableName}`);
      const maxVal = maxResult.rows[0].max_val;
      if (maxVal !== null) {
        // Extract the sequence name from the column default
        const match = row.column_default.match(/nextval\('([^']+)'/);
        if (match) {
          const sequenceName = match[1];
          await client.query(`SELECT setval('${sequenceName}', $1, true)`, [maxVal]);
          console.log(`  Reset sequence ${sequenceName} to ${maxVal}`);
        }
      }
    }
  } catch {
    // Ignore errors - some tables have no sequences
  }
}
async function migrateData() {
  const sourceClient = new Client(sourceConfig);
  const targetClient = new Client(targetConfig);

  try {
    console.log('Connecting to source database...');
    await sourceClient.connect();
    console.log('Connected to source database!');

    console.log('Connecting to target database (Supabase)...');
    await targetClient.connect();
    console.log('Connected to target database!');

    // Temporarily disable triggers. In replica mode Postgres skips user
    // triggers and foreign key constraint triggers, so tables can be
    // cleared and loaded without FK ordering errors.
    console.log('\nDisabling triggers...');
    await targetClient.query('SET session_replication_role = replica');

    let totalMigrated = 0;
    const results: { table: string; count: number }[] = [];
    for (const tableName of MIGRATION_ORDER) {
      try {
        const count = await migrateTable(sourceClient, targetClient, tableName);
        results.push({ table: tableName, count });
        totalMigrated += count;
      } catch (error: any) {
        console.error(`Error migrating ${tableName}: ${error.message}`);
        results.push({ table: tableName, count: -1 }); // -1 marks a failed table
      }
    }

    // Re-enable triggers
    console.log('\nRe-enabling triggers...');
    await targetClient.query('SET session_replication_role = DEFAULT');

    // Reset sequences
    console.log('\nResetting sequences...');
    for (const tableName of MIGRATION_ORDER) {
      await resetSequences(targetClient, tableName);
    }

    // Print summary
    console.log('\n========================================');
    console.log('Migration Summary:');
    console.log('========================================');
    for (const result of results) {
      const status = result.count >= 0 ? `${result.count} rows` : 'FAILED';
      console.log(`${result.table.padEnd(25)} : ${status}`);
    }
    console.log('========================================');
    console.log(`Total rows migrated: ${totalMigrated}`);
    console.log('========================================');
  } catch (error: any) {
    console.error('Migration failed:', error.message);
    throw error;
  } finally {
    await sourceClient.end();
    await targetClient.end();
  }
}

migrateData().catch(console.error);
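
Assuming the script is saved as something like `migrate-data.ts`, it can be run once with `npx ts-node migrate-data.ts`. Note that it is destructive on the target side: each table in MIGRATION_ORDER is cleared before its rows are copied over.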