Merge pull request 'Alembic Database Synchronization' (#16) from dev into loaf-prod

Reviewed-on: andika/membership-be#16
2026-01-05 10:09:27 +00:00
2 changed files with 723 additions and 0 deletions


@@ -0,0 +1,378 @@
"""align_prod_with_dev
Revision ID: 011_align_prod_dev
Revises: 010_add_email_exp
Create Date: 2026-01-05
Aligns PROD database schema with DEV database schema (source of truth).
Fixes type mismatches, removes PROD-only columns, adds DEV-only columns, updates nullable constraints.
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB, JSON
# revision identifiers, used by Alembic.
revision: str = '011_align_prod_dev'
down_revision: Union[str, None] = '010_add_email_exp'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Align PROD schema with DEV schema (source of truth)"""
from sqlalchemy import inspect
conn = op.get_bind()
inspector = inspect(conn)
print("Starting schema alignment: PROD → DEV (source of truth)...")
# ============================================================
# 1. FIX USERS TABLE
# ============================================================
print("\n[1/14] Fixing users table...")
users_columns = {col['name'] for col in inspector.get_columns('users')}
# Remove PROD-only columns (not in models.py or DEV)
if 'bio' in users_columns:
op.drop_column('users', 'bio')
print(" ✓ Removed users.bio (PROD-only)")
if 'interests' in users_columns:
op.drop_column('users', 'interests')
print(" ✓ Removed users.interests (PROD-only)")
try:
# Change constrained VARCHAR(n) to unconstrained VARCHAR
op.alter_column('users', 'first_name', type_=sa.String(), postgresql_using='first_name::varchar')
op.alter_column('users', 'last_name', type_=sa.String(), postgresql_using='last_name::varchar')
op.alter_column('users', 'email', type_=sa.String(), postgresql_using='email::varchar')
op.alter_column('users', 'phone', type_=sa.String(), postgresql_using='phone::varchar')
op.alter_column('users', 'city', type_=sa.String(), postgresql_using='city::varchar')
op.alter_column('users', 'state', type_=sa.String(), postgresql_using='state::varchar')
op.alter_column('users', 'zipcode', type_=sa.String(), postgresql_using='zipcode::varchar')
op.alter_column('users', 'partner_first_name', type_=sa.String(), postgresql_using='partner_first_name::varchar')
op.alter_column('users', 'partner_last_name', type_=sa.String(), postgresql_using='partner_last_name::varchar')
op.alter_column('users', 'referred_by_member_name', type_=sa.String(), postgresql_using='referred_by_member_name::varchar')
op.alter_column('users', 'password_hash', type_=sa.String(), postgresql_using='password_hash::varchar')
op.alter_column('users', 'email_verification_token', type_=sa.String(), postgresql_using='email_verification_token::varchar')
op.alter_column('users', 'password_reset_token', type_=sa.String(), postgresql_using='password_reset_token::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
# Change TEXT to VARCHAR
op.alter_column('users', 'address', type_=sa.String(), postgresql_using='address::varchar')
op.alter_column('users', 'profile_photo_url', type_=sa.String(), postgresql_using='profile_photo_url::varchar')
print(" ✓ Changed TEXT to VARCHAR")
# Change DATE to TIMESTAMP
op.alter_column('users', 'date_of_birth', type_=sa.DateTime(), postgresql_using='date_of_birth::timestamp')
op.alter_column('users', 'member_since', type_=sa.DateTime(), postgresql_using='member_since::timestamp')
print(" ✓ Changed DATE to TIMESTAMP")
# Change JSONB to JSON
op.alter_column('users', 'lead_sources', type_=JSON(), postgresql_using='lead_sources::json')
print(" ✓ Changed lead_sources JSONB to JSON")
# Change TEXT to JSON for volunteer_interests
op.alter_column('users', 'volunteer_interests', type_=JSON(), postgresql_using='volunteer_interests::json')
print(" ✓ Changed volunteer_interests TEXT to JSON")
except Exception as e:
print(f" ⚠️ Warning: Some type conversions failed: {e}")
# Fix nullable constraints to match DEV
op.alter_column('users', 'address', nullable=False)
op.alter_column('users', 'city', nullable=False)
op.alter_column('users', 'state', nullable=False)
op.alter_column('users', 'zipcode', nullable=False)
op.alter_column('users', 'phone', nullable=False)
op.alter_column('users', 'date_of_birth', nullable=False)
op.alter_column('users', 'show_in_directory', nullable=False)
op.alter_column('users', 'newsletter_publish_name', nullable=False)
op.alter_column('users', 'newsletter_publish_birthday', nullable=False)
op.alter_column('users', 'newsletter_publish_photo', nullable=False)
op.alter_column('users', 'newsletter_publish_none', nullable=False)
op.alter_column('users', 'force_password_change', nullable=False)
op.alter_column('users', 'scholarship_requested', nullable=False)
op.alter_column('users', 'accepts_tos', nullable=False)
print(" ✓ Fixed nullable constraints")
# ============================================================
# 2. FIX DONATIONS TABLE
# ============================================================
print("\n[2/14] Fixing donations table...")
donations_columns = {col['name'] for col in inspector.get_columns('donations')}
# Remove PROD-only columns
if 'is_anonymous' in donations_columns:
op.drop_column('donations', 'is_anonymous')
print(" ✓ Removed donations.is_anonymous (PROD-only)")
if 'completed_at' in donations_columns:
op.drop_column('donations', 'completed_at')
print(" ✓ Removed donations.completed_at (PROD-only)")
if 'message' in donations_columns:
op.drop_column('donations', 'message')
print(" ✓ Removed donations.message (PROD-only)")
try:
op.alter_column('donations', 'donor_email', type_=sa.String(), postgresql_using='donor_email::varchar')
op.alter_column('donations', 'donor_name', type_=sa.String(), postgresql_using='donor_name::varchar')
op.alter_column('donations', 'stripe_payment_intent_id', type_=sa.String(), postgresql_using='stripe_payment_intent_id::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: Type conversion failed: {e}")
# ============================================================
# 3. FIX SUBSCRIPTIONS TABLE
# ============================================================
print("\n[3/14] Fixing subscriptions table...")
subscriptions_columns = {col['name'] for col in inspector.get_columns('subscriptions')}
# Remove PROD-only columns
if 'cancel_at_period_end' in subscriptions_columns:
op.drop_column('subscriptions', 'cancel_at_period_end')
print(" ✓ Removed subscriptions.cancel_at_period_end (PROD-only)")
if 'canceled_at' in subscriptions_columns:
op.drop_column('subscriptions', 'canceled_at')
print(" ✓ Removed subscriptions.canceled_at (PROD-only)")
if 'current_period_start' in subscriptions_columns:
op.drop_column('subscriptions', 'current_period_start')
print(" ✓ Removed subscriptions.current_period_start (PROD-only)")
if 'current_period_end' in subscriptions_columns:
op.drop_column('subscriptions', 'current_period_end')
print(" ✓ Removed subscriptions.current_period_end (PROD-only)")
try:
op.alter_column('subscriptions', 'stripe_subscription_id', type_=sa.String(), postgresql_using='stripe_subscription_id::varchar')
op.alter_column('subscriptions', 'stripe_customer_id', type_=sa.String(), postgresql_using='stripe_customer_id::varchar')
op.alter_column('subscriptions', 'payment_method', type_=sa.String(), postgresql_using='payment_method::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: Type conversion failed: {e}")
# Fix nullable constraints
op.alter_column('subscriptions', 'start_date', nullable=False)
op.alter_column('subscriptions', 'manual_payment', nullable=False)
op.alter_column('subscriptions', 'donation_cents', nullable=False)
op.alter_column('subscriptions', 'base_subscription_cents', nullable=False)
print(" ✓ Fixed nullable constraints")
# ============================================================
# 4. FIX STORAGE_USAGE TABLE
# ============================================================
print("\n[4/14] Fixing storage_usage table...")
storage_columns = {col['name'] for col in inspector.get_columns('storage_usage')}
# Remove PROD-only columns
if 'created_at' in storage_columns:
op.drop_column('storage_usage', 'created_at')
print(" ✓ Removed storage_usage.created_at (PROD-only)")
if 'updated_at' in storage_columns:
op.drop_column('storage_usage', 'updated_at')
print(" ✓ Removed storage_usage.updated_at (PROD-only)")
op.alter_column('storage_usage', 'max_bytes_allowed', nullable=False)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 5. FIX EVENT_GALLERIES TABLE (Add missing DEV columns)
# ============================================================
print("\n[5/14] Fixing event_galleries table...")
event_galleries_columns = {col['name'] for col in inspector.get_columns('event_galleries')}
# Add DEV-only columns (exist in models.py but not in PROD)
if 'image_key' not in event_galleries_columns:
op.add_column('event_galleries', sa.Column('image_key', sa.String(), nullable=False, server_default=''))
print(" ✓ Added event_galleries.image_key")
if 'file_size_bytes' not in event_galleries_columns:
op.add_column('event_galleries', sa.Column('file_size_bytes', sa.Integer(), nullable=False, server_default='0'))
print(" ✓ Added event_galleries.file_size_bytes")
try:
op.alter_column('event_galleries', 'image_url', type_=sa.String(), postgresql_using='image_url::varchar')
print(" ✓ Changed TEXT to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: Type conversion failed: {e}")
# Note: uploaded_by column already has correct nullable=False in both DEV and PROD
# ============================================================
# 6. FIX BYLAWS_DOCUMENTS TABLE
# ============================================================
print("\n[6/14] Fixing bylaws_documents table...")
bylaws_columns = {col['name'] for col in inspector.get_columns('bylaws_documents')}
# Remove PROD-only column
if 'updated_at' in bylaws_columns:
op.drop_column('bylaws_documents', 'updated_at')
print(" ✓ Removed bylaws_documents.updated_at (PROD-only)")
try:
op.alter_column('bylaws_documents', 'title', type_=sa.String(), postgresql_using='title::varchar')
op.alter_column('bylaws_documents', 'version', type_=sa.String(), postgresql_using='version::varchar')
op.alter_column('bylaws_documents', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
op.alter_column('bylaws_documents', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
print(" ✓ Changed column types")
except Exception as e:
print(f" ⚠️ Warning: Type conversion failed: {e}")
op.alter_column('bylaws_documents', 'document_type', nullable=True)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 7. FIX EVENTS TABLE
# ============================================================
print("\n[7/14] Fixing events table...")
try:
op.alter_column('events', 'title', type_=sa.String(), postgresql_using='title::varchar')
op.alter_column('events', 'location', type_=sa.String(), postgresql_using='location::varchar')
op.alter_column('events', 'calendar_uid', type_=sa.String(), postgresql_using='calendar_uid::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('events', 'location', nullable=False)
op.alter_column('events', 'created_by', nullable=False)
print(" ✓ Fixed nullable constraints")
# ============================================================
# 8. FIX PERMISSIONS TABLE
# ============================================================
print("\n[8/14] Fixing permissions table...")
try:
op.alter_column('permissions', 'code', type_=sa.String(), postgresql_using='code::varchar')
op.alter_column('permissions', 'name', type_=sa.String(), postgresql_using='name::varchar')
op.alter_column('permissions', 'module', type_=sa.String(), postgresql_using='module::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('permissions', 'module', nullable=False)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 9. FIX ROLES TABLE
# ============================================================
print("\n[9/14] Fixing roles table...")
try:
op.alter_column('roles', 'code', type_=sa.String(), postgresql_using='code::varchar')
op.alter_column('roles', 'name', type_=sa.String(), postgresql_using='name::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('roles', 'is_system_role', nullable=False)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 10. FIX USER_INVITATIONS TABLE
# ============================================================
print("\n[10/14] Fixing user_invitations table...")
try:
op.alter_column('user_invitations', 'email', type_=sa.String(), postgresql_using='email::varchar')
op.alter_column('user_invitations', 'token', type_=sa.String(), postgresql_using='token::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('user_invitations', 'invited_at', nullable=False)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 11. FIX NEWSLETTER_ARCHIVES TABLE
# ============================================================
print("\n[11/14] Fixing newsletter_archives table...")
try:
op.alter_column('newsletter_archives', 'title', type_=sa.String(), postgresql_using='title::varchar')
op.alter_column('newsletter_archives', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
op.alter_column('newsletter_archives', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
print(" ✓ Changed column types")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('newsletter_archives', 'document_type', nullable=True)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 12. FIX FINANCIAL_REPORTS TABLE
# ============================================================
print("\n[12/14] Fixing financial_reports table...")
try:
op.alter_column('financial_reports', 'title', type_=sa.String(), postgresql_using='title::varchar')
op.alter_column('financial_reports', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
op.alter_column('financial_reports', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
print(" ✓ Changed column types")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('financial_reports', 'document_type', nullable=True)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 13. FIX IMPORT_JOBS TABLE
# ============================================================
print("\n[13/14] Fixing import_jobs table...")
try:
op.alter_column('import_jobs', 'filename', type_=sa.String(), postgresql_using='filename::varchar')
op.alter_column('import_jobs', 'file_key', type_=sa.String(), postgresql_using='file_key::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
# Change JSONB to JSON
op.alter_column('import_jobs', 'errors', type_=JSON(), postgresql_using='errors::json')
print(" ✓ Changed errors JSONB to JSON")
except Exception as e:
print(f" ⚠️ Warning: {e}")
# Fix nullable constraints
op.alter_column('import_jobs', 'processed_rows', nullable=False)
op.alter_column('import_jobs', 'successful_rows', nullable=False)
op.alter_column('import_jobs', 'failed_rows', nullable=False)
op.alter_column('import_jobs', 'errors', nullable=False)
op.alter_column('import_jobs', 'started_at', nullable=False)
print(" ✓ Fixed nullable constraints")
# ============================================================
# 14. FIX SUBSCRIPTION_PLANS TABLE
# ============================================================
print("\n[14/14] Fixing subscription_plans table...")
try:
op.alter_column('subscription_plans', 'name', type_=sa.String(), postgresql_using='name::varchar')
op.alter_column('subscription_plans', 'billing_cycle', type_=sa.String(), postgresql_using='billing_cycle::varchar')
op.alter_column('subscription_plans', 'stripe_price_id', type_=sa.String(), postgresql_using='stripe_price_id::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('subscription_plans', 'minimum_price_cents', nullable=False)
print(" ✓ Fixed nullable constraint")
print("\n✅ Schema alignment complete! PROD now matches DEV (source of truth)")
def downgrade() -> None:
"""Revert alignment changes (not recommended)"""
print("⚠️ Downgrade not supported for alignment migration")
print(" To revert, restore from backup")
pass
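For reference, the same revision can also be applied programmatically through Alembic's command API instead of the CLI. This is only a sketch: the alembic.ini path is an assumption, and the target revision is taken from the identifier declared above.

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")                 # assumed location of the Alembic config
command.upgrade(cfg, "011_align_prod_dev")  # or "head" to apply everything pending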

check_db_integrity.py (new file, 345 additions)

@@ -0,0 +1,345 @@
#!/usr/bin/env python3
"""
Database Integrity Checker
Compares schema and data integrity between development and production databases
"""
import sys
from sqlalchemy import create_engine, inspect, text
from sqlalchemy.engine import reflection
import json
from collections import defaultdict
# Database URLs
DEV_DB = "postgresql://postgres:RchhcpaUKZuZuMOvB5kwCP1weLBnAG6tNMXE5FHdk8AwCvolBMALYFVYRM7WCl9x@10.9.23.11:5001/membership_demo"
PROD_DB = "postgresql://postgres:fDv3fRvMgfPueDWDUxj27NJVaynsewIdh6b2Hb28tcvG3Ew6mhscASg2kulx4tr7@10.9.23.11:54321/loaf_new"
def get_db_info(engine, label):
"""Get comprehensive database information"""
inspector = inspect(engine)
info = {
'label': label,
'tables': {},
'indexes': {},
'foreign_keys': {},
'sequences': [],
'enums': []
}
# Get all table names
table_names = inspector.get_table_names()
for table_name in table_names:
# Get columns
columns = inspector.get_columns(table_name)
info['tables'][table_name] = {
'columns': {
col['name']: {
'type': str(col['type']),
'nullable': col['nullable'],
'default': str(col.get('default', None)),
'autoincrement': col.get('autoincrement', False)
}
for col in columns
},
'column_count': len(columns)
}
# Get primary keys
pk = inspector.get_pk_constraint(table_name)
info['tables'][table_name]['primary_key'] = pk.get('constrained_columns', [])
# Get indexes
indexes = inspector.get_indexes(table_name)
info['indexes'][table_name] = [
{
'name': idx['name'],
'columns': idx['column_names'],
'unique': idx['unique']
}
for idx in indexes
]
# Get foreign keys
fks = inspector.get_foreign_keys(table_name)
info['foreign_keys'][table_name] = [
{
'name': fk.get('name'),
'columns': fk['constrained_columns'],
'referred_table': fk['referred_table'],
'referred_columns': fk['referred_columns']
}
for fk in fks
]
# Get sequences
with engine.connect() as conn:
result = conn.execute(text("""
SELECT sequence_name
FROM information_schema.sequences
WHERE sequence_schema = 'public'
"""))
info['sequences'] = [row[0] for row in result]
# Get enum types
result = conn.execute(text("""
SELECT t.typname as enum_name,
array_agg(e.enumlabel ORDER BY e.enumsortorder) as enum_values
FROM pg_type t
JOIN pg_enum e ON t.oid = e.enumtypid
WHERE t.typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'public')
GROUP BY t.typname
"""))
info['enums'] = {row[0]: row[1] for row in result}
return info
def compare_tables(dev_info, prod_info):
"""Compare tables between databases"""
dev_tables = set(dev_info['tables'].keys())
prod_tables = set(prod_info['tables'].keys())
print("\n" + "="*80)
print("TABLE COMPARISON")
print("="*80)
# Tables only in dev
dev_only = dev_tables - prod_tables
if dev_only:
print(f"\n❌ Tables only in DEV ({len(dev_only)}):")
for table in sorted(dev_only):
print(f" - {table}")
# Tables only in prod
prod_only = prod_tables - dev_tables
if prod_only:
print(f"\n❌ Tables only in PROD ({len(prod_only)}):")
for table in sorted(prod_only):
print(f" - {table}")
# Common tables
common = dev_tables & prod_tables
print(f"\n✅ Common tables: {len(common)}")
return common
def compare_columns(dev_info, prod_info, common_tables):
"""Compare columns for common tables"""
print("\n" + "="*80)
print("COLUMN COMPARISON")
print("="*80)
issues = []
for table in sorted(common_tables):
dev_cols = set(dev_info['tables'][table]['columns'].keys())
prod_cols = set(prod_info['tables'][table]['columns'].keys())
dev_only = dev_cols - prod_cols
prod_only = prod_cols - dev_cols
if dev_only or prod_only:
print(f"\n⚠️ Table '{table}' has column differences:")
if dev_only:
print(f" Columns only in DEV: {', '.join(sorted(dev_only))}")
issues.append(f"{table}: DEV-only columns: {', '.join(dev_only)}")
if prod_only:
print(f" Columns only in PROD: {', '.join(sorted(prod_only))}")
issues.append(f"{table}: PROD-only columns: {', '.join(prod_only)}")
# Compare column types for common columns
common_cols = dev_cols & prod_cols
for col in common_cols:
dev_col = dev_info['tables'][table]['columns'][col]
prod_col = prod_info['tables'][table]['columns'][col]
if dev_col['type'] != prod_col['type']:
print(f" ⚠️ Column '{col}' type mismatch:")
print(f" DEV: {dev_col['type']}")
print(f" PROD: {prod_col['type']}")
issues.append(f"{table}.{col}: Type mismatch")
if dev_col['nullable'] != prod_col['nullable']:
print(f" ⚠️ Column '{col}' nullable mismatch:")
print(f" DEV: {dev_col['nullable']}")
print(f" PROD: {prod_col['nullable']}")
issues.append(f"{table}.{col}: Nullable mismatch")
if not issues:
print("\n✅ All columns match between DEV and PROD")
return issues
def compare_enums(dev_info, prod_info):
"""Compare enum types"""
print("\n" + "="*80)
print("ENUM TYPE COMPARISON")
print("="*80)
dev_enums = set(dev_info['enums'].keys())
prod_enums = set(prod_info['enums'].keys())
dev_only = dev_enums - prod_enums
prod_only = prod_enums - dev_enums
issues = []
if dev_only:
print(f"\n❌ Enums only in DEV: {', '.join(sorted(dev_only))}")
issues.extend([f"Enum '{e}' only in DEV" for e in dev_only])
if prod_only:
print(f"\n❌ Enums only in PROD: {', '.join(sorted(prod_only))}")
issues.extend([f"Enum '{e}' only in PROD" for e in prod_only])
# Compare enum values for common enums
common = dev_enums & prod_enums
for enum_name in sorted(common):
dev_values = set(dev_info['enums'][enum_name])
prod_values = set(prod_info['enums'][enum_name])
if dev_values != prod_values:
print(f"\n⚠️ Enum '{enum_name}' values differ:")
print(f" DEV: {', '.join(sorted(dev_values))}")
print(f" PROD: {', '.join(sorted(prod_values))}")
issues.append(f"Enum '{enum_name}' values differ")
if not issues:
print("\n✅ All enum types match")
return issues
def check_migration_history(dev_engine, prod_engine):
"""Check Alembic migration history"""
print("\n" + "="*80)
print("MIGRATION HISTORY")
print("="*80)
try:
with dev_engine.connect() as dev_conn:
dev_result = dev_conn.execute(text("SELECT version_num FROM alembic_version"))
dev_version = dev_result.fetchone()
dev_version = dev_version[0] if dev_version else None
with prod_engine.connect() as prod_conn:
prod_result = prod_conn.execute(text("SELECT version_num FROM alembic_version"))
prod_version = prod_result.fetchone()
prod_version = prod_version[0] if prod_version else None
print(f"\nDEV migration version: {dev_version}")
print(f"PROD migration version: {prod_version}")
if dev_version == prod_version:
print("✅ Migration versions match")
return []
else:
print("❌ Migration versions DO NOT match")
return ["Migration versions differ"]
except Exception as e:
print(f"⚠️ Could not check migration history: {str(e)}")
return [f"Migration check failed: {str(e)}"]
def get_row_counts(engine, tables):
"""Get row counts for all tables"""
counts = {}
with engine.connect() as conn:
for table in tables:
result = conn.execute(text(f"SELECT COUNT(*) FROM {table}"))
counts[table] = result.fetchone()[0]
return counts
def compare_data_counts(dev_engine, prod_engine, common_tables):
"""Compare row counts between databases"""
print("\n" + "="*80)
print("DATA ROW COUNTS")
print("="*80)
print("\nGetting DEV row counts...")
dev_counts = get_row_counts(dev_engine, common_tables)
print("Getting PROD row counts...")
prod_counts = get_row_counts(prod_engine, common_tables)
print(f"\n{'Table':<30} {'DEV':<15} {'PROD':<15} {'Diff':<15}")
print("-" * 75)
for table in sorted(common_tables):
dev_count = dev_counts[table]
prod_count = prod_counts[table]
diff = dev_count - prod_count
diff_str = f"+{diff}" if diff > 0 else str(diff)
status = "⚠️ " if abs(diff) > 0 else ""
print(f"{status} {table:<28} {dev_count:<15} {prod_count:<15} {diff_str:<15}")
def main():
print("\n" + "="*80)
print("DATABASE INTEGRITY CHECKER")
print("="*80)
print(f"\nDEV: {DEV_DB.split('@')[1]}") # Hide password
print(f"PROD: {PROD_DB.split('@')[1]}")
try:
# Connect to databases
print("\n🔌 Connecting to databases...")
dev_engine = create_engine(DEV_DB)
prod_engine = create_engine(PROD_DB)
# Test connections
with dev_engine.connect() as conn:
conn.execute(text("SELECT 1"))
print("✅ Connected to DEV database")
with prod_engine.connect() as conn:
conn.execute(text("SELECT 1"))
print("✅ Connected to PROD database")
# Get database info
print("\n📊 Gathering database information...")
dev_info = get_db_info(dev_engine, "DEV")
prod_info = get_db_info(prod_engine, "PROD")
# Run comparisons
all_issues = []
common_tables = compare_tables(dev_info, prod_info)
column_issues = compare_columns(dev_info, prod_info, common_tables)
all_issues.extend(column_issues)
enum_issues = compare_enums(dev_info, prod_info)
all_issues.extend(enum_issues)
migration_issues = check_migration_history(dev_engine, prod_engine)
all_issues.extend(migration_issues)
compare_data_counts(dev_engine, prod_engine, common_tables)
# Summary
print("\n" + "="*80)
print("SUMMARY")
print("="*80)
if all_issues:
print(f"\n❌ Found {len(all_issues)} integrity issues:")
for i, issue in enumerate(all_issues, 1):
print(f" {i}. {issue}")
print("\n⚠️ Databases are NOT in sync!")
sys.exit(1)
else:
print("\n✅ Databases are in sync!")
print("✅ No integrity issues found")
sys.exit(0)
except Exception as e:
print(f"\n❌ Error: {str(e)}")
import traceback
traceback.print_exc()
sys.exit(1)
if __name__ == "__main__":
main()
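
Because the checker's helpers are plain module-level functions, they can also be reused from another script. A minimal sketch, assuming check_db_integrity.py is importable from the working directory; the connection URLs are placeholders, not real credentials:

from sqlalchemy import create_engine
from check_db_integrity import get_db_info, compare_tables, compare_columns

# Placeholder URLs for illustration only.
dev_engine = create_engine("postgresql://user:password@dev-host:5432/membership_demo")
prod_engine = create_engine("postgresql://user:password@prod-host:5432/loaf_new")

dev_info = get_db_info(dev_engine, "DEV")
prod_info = get_db_info(prod_engine, "PROD")
common_tables = compare_tables(dev_info, prod_info)
issues = compare_columns(dev_info, prod_info, common_tables)
print(f"{len(issues)} column-level differences found")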