12 Commits

Author SHA1 Message Date
9c3f3c88b8 Merge pull request 'Add comprehensive column check and migration 009' (#14) from dev into loaf-prod
Reviewed-on: #14
2026-01-04 16:19:51 +00:00
849a6a32af Merge pull request 'Add missing donations table columns' (#13) from dev into loaf-prod
Reviewed-on: #13
2026-01-04 16:10:27 +00:00
69b8185414 Merge pull request 'Fix migration 007 - skip existing columns' (#12) from dev into loaf-prod
Reviewed-on: #12
2026-01-04 16:06:27 +00:00
f5f8ca8dc6 Merge pull request 'Add missing subscription_plans columns' (#11) from dev into loaf-prod
Reviewed-on: #11
2026-01-04 16:03:43 +00:00
661a4cbb7c Merge pull request 'Fix subscription_plans.is_active column name' (#10) from dev into loaf-prod
Reviewed-on: #10
2026-01-04 15:58:05 +00:00
a01a8b9915 Merge pull request 'Superadmin nullable fix' (#9) from dev into loaf-prod
Reviewed-on: #9
2026-01-04 15:35:59 +00:00
e126cb988c Merge pull request 'Subscription and Storage data mismatch' (#8) from dev into loaf-prod
Reviewed-on: #8
2026-01-04 15:28:46 +00:00
fd988241a1 Merge pull request 'Subscription and Storage data mismatch' (#7) from dev into loaf-prod
Reviewed-on: #7
2026-01-04 15:24:11 +00:00
c28eddca67 Merge pull request 'Fix database mismatches' (#6) from dev into loaf-prod
Reviewed-on: #6
2026-01-04 15:17:18 +00:00
e20542ccdc Merge pull request 'Fix database mismatches' (#5) from dev into loaf-prod
Reviewed-on: #5
2026-01-04 15:02:09 +00:00
b3f1f5f789 Merge pull request 'Prod Deployment Preparation' (#4) from dev into loaf-prod
Reviewed-on: #4
2026-01-04 12:10:12 +00:00
1da045f73f Merge pull request 'Update Gitignore' (#3) from dev into loaf-prod
Reviewed-on: #3
2026-01-02 08:45:29 +00:00
34 changed files with 108 additions and 2335 deletions

View File

@@ -6,10 +6,6 @@ JWT_SECRET=your-secret-key-change-this-in-production
 JWT_ALGORITHM=HS256
 ACCESS_TOKEN_EXPIRE_MINUTES=30
-# Settings Encryption (for database-stored sensitive settings)
-# Generate with: python -c "import secrets; print(secrets.token_urlsafe(64))"
-SETTINGS_ENCRYPTION_KEY=your-encryption-key-generate-with-command-above
 # SMTP Email Configuration (Port 465 - SSL/TLS)
 SMTP_HOST=p.konceptkit.com
 SMTP_PORT=465
@@ -32,14 +28,7 @@ SMTP_FROM_NAME=LOAF Membership
 # Frontend URL
 FRONTEND_URL=http://localhost:3000
-# Backend URL (for webhook URLs and API references)
-# Used to construct Stripe webhook URL shown in Admin Settings
-BACKEND_URL=http://localhost:8000
-# Stripe Configuration (NOW DATABASE-DRIVEN via Admin Settings page)
-# Configure Stripe credentials through the Admin Settings UI (requires SETTINGS_ENCRYPTION_KEY)
-# No longer requires .env variables - managed through database for dynamic updates
-# Legacy .env variables below are deprecated:
 # Stripe Configuration (for future payment integration)
 # STRIPE_SECRET_KEY=sk_test_...
 # STRIPE_WEBHOOK_SECRET=whsec_...

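The comments in this file describe Stripe credentials moving out of .env and into database-stored settings encrypted with SETTINGS_ENCRYPTION_KEY. A sketch of how such a setting might be read, based on the system_settings table created in migration ec4cb4a49cde and the EncryptionService shown at the end of this diff; the helper name, module path, and session handling are hypothetical, not code from this repository:

from sqlalchemy import text

def get_setting(session, key: str):
    # Look the setting up by its unique key; setting_type tells us whether
    # the stored value is plaintext, encrypted, or json
    row = session.execute(
        text("SELECT setting_value, setting_type FROM system_settings "
             "WHERE setting_key = :key"),
        {"key": key},
    ).fetchone()
    if row is None:
        return None
    value, setting_type = row
    if setting_type == 'encrypted':
        # Hypothetical import; the encryption service's real module name
        # is not shown in this diff
        from encryption_service import encryption_service
        return encryption_service.decrypt(value)
    return value

# e.g. secret = get_setting(session, 'stripe_secret_key')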
.gitignore (vendored, 3 changed lines)
View File

@@ -1,5 +1,3 @@
-.env
-.venv
 # ============================================================================
 # Python Backend .gitignore
 # For FastAPI + PostgreSQL + Cloudflare R2 + Stripe
@@ -10,7 +8,6 @@
 .env.*
 !.env.example
 .envrc
-.sh
 # ===== Python =====
 # Byte-compiled / optimized / DLL files

View File

@@ -1,20 +0,0 @@
# Use an official Python image (Linux)
FROM python:3.12-slim
# Set a working directory
WORKDIR /app
# Copy dependency list
COPY requirements.txt .
# Install dependencies
RUN pip3 install --no-cache-dir -r requirements.txt
# Copy the rest of the project
COPY . .
# Expose port (whatever your backend runs on)
EXPOSE 8000
# Run exactly your command
CMD ["python", "-m", "uvicorn", "server:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -24,48 +24,31 @@ depends_on: Union[str, Sequence[str], None] = None

 def upgrade() -> None:
-    """Add missing user fields (skip if already exists)"""
-    from sqlalchemy import inspect
-
-    conn = op.get_bind()
-    inspector = inspect(conn)
-    existing_columns = {col['name'] for col in inspector.get_columns('users')}
+    """Add missing user fields"""
     # Add scholarship_reason
-    if 'scholarship_reason' not in existing_columns:
-        op.add_column('users', sa.Column('scholarship_reason', sa.Text(), nullable=True))
+    op.add_column('users', sa.Column('scholarship_reason', sa.Text(), nullable=True))
     # Add directory fields
-    if 'directory_email' not in existing_columns:
-        op.add_column('users', sa.Column('directory_email', sa.String(), nullable=True))
-    if 'directory_bio' not in existing_columns:
-        op.add_column('users', sa.Column('directory_bio', sa.Text(), nullable=True))
-    if 'directory_address' not in existing_columns:
-        op.add_column('users', sa.Column('directory_address', sa.String(), nullable=True))
-    if 'directory_phone' not in existing_columns:
-        op.add_column('users', sa.Column('directory_phone', sa.String(), nullable=True))
-    if 'directory_dob' not in existing_columns:
-        op.add_column('users', sa.Column('directory_dob', sa.DateTime(), nullable=True))
-    if 'directory_partner_name' not in existing_columns:
-        op.add_column('users', sa.Column('directory_partner_name', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('directory_email', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('directory_bio', sa.Text(), nullable=True))
+    op.add_column('users', sa.Column('directory_address', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('directory_phone', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('directory_dob', sa.DateTime(), nullable=True))
+    op.add_column('users', sa.Column('directory_partner_name', sa.String(), nullable=True))
-    # Rename profile_image_url to profile_photo_url (skip if already renamed)
-    if 'profile_image_url' in existing_columns and 'profile_photo_url' not in existing_columns:
-        op.alter_column('users', 'profile_image_url', new_column_name='profile_photo_url')
+    # Rename profile_image_url to profile_photo_url (for consistency with models.py)
+    op.alter_column('users', 'profile_image_url', new_column_name='profile_photo_url')
     # Add social media fields
-    if 'social_media_facebook' not in existing_columns:
-        op.add_column('users', sa.Column('social_media_facebook', sa.String(), nullable=True))
-    if 'social_media_instagram' not in existing_columns:
-        op.add_column('users', sa.Column('social_media_instagram', sa.String(), nullable=True))
-    if 'social_media_twitter' not in existing_columns:
-        op.add_column('users', sa.Column('social_media_twitter', sa.String(), nullable=True))
-    if 'social_media_linkedin' not in existing_columns:
-        op.add_column('users', sa.Column('social_media_linkedin', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('social_media_facebook', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('social_media_instagram', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('social_media_twitter', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('social_media_linkedin', sa.String(), nullable=True))
-    # Add email_verification_expires if missing
-    if 'email_verification_expires' not in existing_columns:
-        op.add_column('users', sa.Column('email_verification_expires', sa.DateTime(), nullable=True))
+    # Add email_verification_expires (exists in DB but not in models.py initially)
+    # Check if it already exists, if not add it
+    # This field should already exist from the initial schema, but adding for completeness

 def downgrade() -> None:

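The guard pattern that this hunk and the ones below toggle on and off — query the live schema through SQLAlchemy's inspector before issuing DDL — is what makes a migration safe to re-run after a partial failure. A minimal standalone sketch of that pattern, using a hypothetical table and column rather than anything in this repository:

from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect

def upgrade() -> None:
    conn = op.get_bind()
    inspector = inspect(conn)
    existing_columns = {col['name'] for col in inspector.get_columns('widgets')}
    # Only add the column if a previous (partial) run has not created it yet
    if 'notes' not in existing_columns:
        op.add_column('widgets', sa.Column('notes', sa.Text(), nullable=True))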
View File

@@ -22,24 +22,11 @@ depends_on: Union[str, Sequence[str], None] = None

 def upgrade() -> None:
-    """Add optional pre-filled information fields to user_invitations (skip if already exists)"""
-    from sqlalchemy import inspect
+    """Add optional pre-filled information fields to user_invitations"""
-    conn = op.get_bind()
-    inspector = inspect(conn)
-    existing_columns = {col['name'] for col in inspector.get_columns('user_invitations')}
-    # Add first_name if missing
-    if 'first_name' not in existing_columns:
-        op.add_column('user_invitations', sa.Column('first_name', sa.String(), nullable=True))
-    # Add last_name if missing
-    if 'last_name' not in existing_columns:
-        op.add_column('user_invitations', sa.Column('last_name', sa.String(), nullable=True))
-    # Add phone if missing
-    if 'phone' not in existing_columns:
-        op.add_column('user_invitations', sa.Column('phone', sa.String(), nullable=True))
+    op.add_column('user_invitations', sa.Column('first_name', sa.String(), nullable=True))
+    op.add_column('user_invitations', sa.Column('last_name', sa.String(), nullable=True))
+    op.add_column('user_invitations', sa.Column('phone', sa.String(), nullable=True))

 def downgrade() -> None:

View File

@@ -22,26 +22,16 @@ depends_on: Union[str, Sequence[str], None] = None

 def upgrade() -> None:
-    """Add file_size_bytes column to document tables (skip if already exists)"""
-    from sqlalchemy import inspect
+    """Add file_size_bytes column to document tables"""
-    conn = op.get_bind()
-    inspector = inspect(conn)
+    # Add to newsletter_archives
+    op.add_column('newsletter_archives', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
-    # Add to newsletter_archives if missing
-    existing_columns = {col['name'] for col in inspector.get_columns('newsletter_archives')}
-    if 'file_size_bytes' not in existing_columns:
-        op.add_column('newsletter_archives', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
+    # Add to financial_reports
+    op.add_column('financial_reports', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
-    # Add to financial_reports if missing
-    existing_columns = {col['name'] for col in inspector.get_columns('financial_reports')}
-    if 'file_size_bytes' not in existing_columns:
-        op.add_column('financial_reports', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
-    # Add to bylaws_documents if missing
-    existing_columns = {col['name'] for col in inspector.get_columns('bylaws_documents')}
-    if 'file_size_bytes' not in existing_columns:
-        op.add_column('bylaws_documents', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
+    # Add to bylaws_documents
+    op.add_column('bylaws_documents', sa.Column('file_size_bytes', sa.Integer(), nullable=True))

 def downgrade() -> None:

View File

@@ -22,44 +22,26 @@ depends_on: Union[str, Sequence[str], None] = None

 def upgrade() -> None:
-    """Add missing columns and fix naming (skip if already exists)"""
-    from sqlalchemy import inspect
+    """Add missing columns and fix naming"""
-    conn = op.get_bind()
-    inspector = inspect(conn)
+    # Add missing columns to subscriptions table
+    op.add_column('subscriptions', sa.Column('start_date', sa.DateTime(timezone=True), nullable=True))
+    op.add_column('subscriptions', sa.Column('end_date', sa.DateTime(timezone=True), nullable=True))
+    op.add_column('subscriptions', sa.Column('amount_paid_cents', sa.Integer(), nullable=True))
+    op.add_column('subscriptions', sa.Column('manual_payment_notes', sa.Text(), nullable=True))
+    op.add_column('subscriptions', sa.Column('manual_payment_admin_id', UUID(as_uuid=True), nullable=True))
+    op.add_column('subscriptions', sa.Column('manual_payment_date', sa.DateTime(timezone=True), nullable=True))
+    op.add_column('subscriptions', sa.Column('payment_method', sa.String(50), nullable=True))
-    # Check existing columns in subscriptions table
-    existing_columns = {col['name'] for col in inspector.get_columns('subscriptions')}
+    # Add foreign key for manual_payment_admin_id
+    op.create_foreign_key(
+        'subscriptions_manual_payment_admin_id_fkey',
+        'subscriptions', 'users',
+        ['manual_payment_admin_id'], ['id']
+    )
-    # Add missing columns to subscriptions table only if they don't exist
-    if 'start_date' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('start_date', sa.DateTime(timezone=True), nullable=True))
-    if 'end_date' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('end_date', sa.DateTime(timezone=True), nullable=True))
-    if 'amount_paid_cents' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('amount_paid_cents', sa.Integer(), nullable=True))
-    if 'manual_payment_notes' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('manual_payment_notes', sa.Text(), nullable=True))
-    if 'manual_payment_admin_id' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('manual_payment_admin_id', UUID(as_uuid=True), nullable=True))
-    if 'manual_payment_date' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('manual_payment_date', sa.DateTime(timezone=True), nullable=True))
-    if 'payment_method' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('payment_method', sa.String(50), nullable=True))
-    # Add foreign key for manual_payment_admin_id if it doesn't exist
-    existing_fks = [fk['name'] for fk in inspector.get_foreign_keys('subscriptions')]
-    if 'subscriptions_manual_payment_admin_id_fkey' not in existing_fks:
-        op.create_foreign_key(
-            'subscriptions_manual_payment_admin_id_fkey',
-            'subscriptions', 'users',
-            ['manual_payment_admin_id'], ['id']
-        )
-    # Rename storage_usage.last_calculated_at to last_updated (only if needed)
-    storage_columns = {col['name'] for col in inspector.get_columns('storage_usage')}
-    if 'last_calculated_at' in storage_columns and 'last_updated' not in storage_columns:
-        op.alter_column('storage_usage', 'last_calculated_at', new_column_name='last_updated')
+    # Rename storage_usage.last_calculated_at to last_updated
+    op.alter_column('storage_usage', 'last_calculated_at', new_column_name='last_updated')

 def downgrade() -> None:

View File

@@ -20,16 +20,8 @@ depends_on: Union[str, Sequence[str], None] = None

 def upgrade() -> None:
-    """Rename is_active to active (skip if already renamed)"""
-    from sqlalchemy import inspect
-
-    conn = op.get_bind()
-    inspector = inspect(conn)
-
-    # Check if rename is needed
-    existing_columns = {col['name'] for col in inspector.get_columns('subscription_plans')}
-    if 'is_active' in existing_columns and 'active' not in existing_columns:
-        op.alter_column('subscription_plans', 'is_active', new_column_name='active')
+    """Rename is_active to active"""
+    op.alter_column('subscription_plans', 'is_active', new_column_name='active')

 def downgrade() -> None:

View File

@@ -1,37 +0,0 @@
"""add_email_verification_expires
Revision ID: 010_add_email_exp
Revises: 009_add_all_missing
Create Date: 2026-01-05
Fixes:
- Add missing email_verification_expires column to users table
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '010_add_email_exp'
down_revision: Union[str, None] = '009_add_all_missing'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Add email_verification_expires column (skip if already exists)"""
from sqlalchemy import inspect
conn = op.get_bind()
inspector = inspect(conn)
existing_columns = {col['name'] for col in inspector.get_columns('users')}
# Add email_verification_expires if missing
if 'email_verification_expires' not in existing_columns:
op.add_column('users', sa.Column('email_verification_expires', sa.DateTime(), nullable=True))
def downgrade() -> None:
"""Remove email_verification_expires column"""
op.drop_column('users', 'email_verification_expires')

View File

@@ -1,410 +0,0 @@
"""align_prod_with_dev
Revision ID: 011_align_prod_dev
Revises: 010_add_email_exp
Create Date: 2026-01-05
Aligns PROD database schema with DEV database schema (source of truth).
Fixes type mismatches, removes PROD-only columns, adds DEV-only columns, updates nullable constraints.
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB, JSON
# revision identifiers, used by Alembic.
revision: str = '011_align_prod_dev'
down_revision: Union[str, None] = '010_add_email_exp'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Align PROD schema with DEV schema (source of truth)"""
from sqlalchemy import inspect
conn = op.get_bind()
inspector = inspect(conn)
print("Starting schema alignment: PROD → DEV (source of truth)...")
# ============================================================
# 1. FIX USERS TABLE
# ============================================================
print("\n[1/14] Fixing users table...")
users_columns = {col['name'] for col in inspector.get_columns('users')}
# Remove PROD-only columns (not in models.py or DEV)
if 'bio' in users_columns:
op.drop_column('users', 'bio')
print(" ✓ Removed users.bio (PROD-only)")
if 'interests' in users_columns:
op.drop_column('users', 'interests')
print(" ✓ Removed users.interests (PROD-only)")
try:
# Change constrained VARCHAR(n) to unconstrained VARCHAR
op.alter_column('users', 'first_name', type_=sa.String(), postgresql_using='first_name::varchar')
op.alter_column('users', 'last_name', type_=sa.String(), postgresql_using='last_name::varchar')
op.alter_column('users', 'email', type_=sa.String(), postgresql_using='email::varchar')
op.alter_column('users', 'phone', type_=sa.String(), postgresql_using='phone::varchar')
op.alter_column('users', 'city', type_=sa.String(), postgresql_using='city::varchar')
op.alter_column('users', 'state', type_=sa.String(), postgresql_using='state::varchar')
op.alter_column('users', 'zipcode', type_=sa.String(), postgresql_using='zipcode::varchar')
op.alter_column('users', 'partner_first_name', type_=sa.String(), postgresql_using='partner_first_name::varchar')
op.alter_column('users', 'partner_last_name', type_=sa.String(), postgresql_using='partner_last_name::varchar')
op.alter_column('users', 'referred_by_member_name', type_=sa.String(), postgresql_using='referred_by_member_name::varchar')
op.alter_column('users', 'password_hash', type_=sa.String(), postgresql_using='password_hash::varchar')
op.alter_column('users', 'email_verification_token', type_=sa.String(), postgresql_using='email_verification_token::varchar')
op.alter_column('users', 'password_reset_token', type_=sa.String(), postgresql_using='password_reset_token::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
# Change TEXT to VARCHAR
op.alter_column('users', 'address', type_=sa.String(), postgresql_using='address::varchar')
op.alter_column('users', 'profile_photo_url', type_=sa.String(), postgresql_using='profile_photo_url::varchar')
print(" ✓ Changed TEXT to VARCHAR")
# Change DATE to TIMESTAMP
op.alter_column('users', 'date_of_birth', type_=sa.DateTime(), postgresql_using='date_of_birth::timestamp')
op.alter_column('users', 'member_since', type_=sa.DateTime(), postgresql_using='member_since::timestamp')
print(" ✓ Changed DATE to TIMESTAMP")
# Change JSONB to JSON
op.alter_column('users', 'lead_sources', type_=JSON(), postgresql_using='lead_sources::json')
print(" ✓ Changed lead_sources JSONB to JSON")
# Change TEXT to JSON for volunteer_interests
op.alter_column('users', 'volunteer_interests', type_=JSON(), postgresql_using='volunteer_interests::json')
print(" ✓ Changed volunteer_interests TEXT to JSON")
except Exception as e:
print(f" ⚠️ Warning: Some type conversions failed: {e}")
# Fill NULL values with defaults BEFORE setting NOT NULL constraints
print(" ⏳ Filling NULL values with defaults...")
# Update string fields
conn.execute(sa.text("UPDATE users SET address = '' WHERE address IS NULL"))
conn.execute(sa.text("UPDATE users SET city = '' WHERE city IS NULL"))
conn.execute(sa.text("UPDATE users SET state = '' WHERE state IS NULL"))
conn.execute(sa.text("UPDATE users SET zipcode = '' WHERE zipcode IS NULL"))
conn.execute(sa.text("UPDATE users SET phone = '' WHERE phone IS NULL"))
# Update date_of_birth with sentinel date
conn.execute(sa.text("UPDATE users SET date_of_birth = '1900-01-01'::timestamp WHERE date_of_birth IS NULL"))
# Update boolean fields
conn.execute(sa.text("UPDATE users SET show_in_directory = false WHERE show_in_directory IS NULL"))
conn.execute(sa.text("UPDATE users SET newsletter_publish_name = false WHERE newsletter_publish_name IS NULL"))
conn.execute(sa.text("UPDATE users SET newsletter_publish_birthday = false WHERE newsletter_publish_birthday IS NULL"))
conn.execute(sa.text("UPDATE users SET newsletter_publish_photo = false WHERE newsletter_publish_photo IS NULL"))
conn.execute(sa.text("UPDATE users SET newsletter_publish_none = false WHERE newsletter_publish_none IS NULL"))
conn.execute(sa.text("UPDATE users SET force_password_change = false WHERE force_password_change IS NULL"))
conn.execute(sa.text("UPDATE users SET scholarship_requested = false WHERE scholarship_requested IS NULL"))
conn.execute(sa.text("UPDATE users SET accepts_tos = false WHERE accepts_tos IS NULL"))
# Check how many rows were updated
null_check = conn.execute(sa.text("""
SELECT
COUNT(*) FILTER (WHERE address = '') as address_filled,
COUNT(*) FILTER (WHERE date_of_birth = '1900-01-01'::timestamp) as dob_filled
FROM users
""")).fetchone()
print(f" ✓ Filled NULLs: {null_check[0]} addresses, {null_check[1]} dates of birth")
# Now safe to set NOT NULL constraints
op.alter_column('users', 'address', nullable=False)
op.alter_column('users', 'city', nullable=False)
op.alter_column('users', 'state', nullable=False)
op.alter_column('users', 'zipcode', nullable=False)
op.alter_column('users', 'phone', nullable=False)
op.alter_column('users', 'date_of_birth', nullable=False)
op.alter_column('users', 'show_in_directory', nullable=False)
op.alter_column('users', 'newsletter_publish_name', nullable=False)
op.alter_column('users', 'newsletter_publish_birthday', nullable=False)
op.alter_column('users', 'newsletter_publish_photo', nullable=False)
op.alter_column('users', 'newsletter_publish_none', nullable=False)
op.alter_column('users', 'force_password_change', nullable=False)
op.alter_column('users', 'scholarship_requested', nullable=False)
op.alter_column('users', 'accepts_tos', nullable=False)
print(" ✓ Set NOT NULL constraints")
# ============================================================
# 2. FIX DONATIONS TABLE
# ============================================================
print("\n[2/14] Fixing donations table...")
donations_columns = {col['name'] for col in inspector.get_columns('donations')}
# Remove PROD-only columns
if 'is_anonymous' in donations_columns:
op.drop_column('donations', 'is_anonymous')
print(" ✓ Removed donations.is_anonymous (PROD-only)")
if 'completed_at' in donations_columns:
op.drop_column('donations', 'completed_at')
print(" ✓ Removed donations.completed_at (PROD-only)")
if 'message' in donations_columns:
op.drop_column('donations', 'message')
print(" ✓ Removed donations.message (PROD-only)")
try:
op.alter_column('donations', 'donor_email', type_=sa.String(), postgresql_using='donor_email::varchar')
op.alter_column('donations', 'donor_name', type_=sa.String(), postgresql_using='donor_name::varchar')
op.alter_column('donations', 'stripe_payment_intent_id', type_=sa.String(), postgresql_using='stripe_payment_intent_id::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: Type conversion failed: {e}")
# ============================================================
# 3. FIX SUBSCRIPTIONS TABLE
# ============================================================
print("\n[3/14] Fixing subscriptions table...")
subscriptions_columns = {col['name'] for col in inspector.get_columns('subscriptions')}
# Remove PROD-only columns
if 'cancel_at_period_end' in subscriptions_columns:
op.drop_column('subscriptions', 'cancel_at_period_end')
print(" ✓ Removed subscriptions.cancel_at_period_end (PROD-only)")
if 'canceled_at' in subscriptions_columns:
op.drop_column('subscriptions', 'canceled_at')
print(" ✓ Removed subscriptions.canceled_at (PROD-only)")
if 'current_period_start' in subscriptions_columns:
op.drop_column('subscriptions', 'current_period_start')
print(" ✓ Removed subscriptions.current_period_start (PROD-only)")
if 'current_period_end' in subscriptions_columns:
op.drop_column('subscriptions', 'current_period_end')
print(" ✓ Removed subscriptions.current_period_end (PROD-only)")
try:
op.alter_column('subscriptions', 'stripe_subscription_id', type_=sa.String(), postgresql_using='stripe_subscription_id::varchar')
op.alter_column('subscriptions', 'stripe_customer_id', type_=sa.String(), postgresql_using='stripe_customer_id::varchar')
op.alter_column('subscriptions', 'payment_method', type_=sa.String(), postgresql_using='payment_method::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: Type conversion failed: {e}")
# Fix nullable constraints
op.alter_column('subscriptions', 'start_date', nullable=False)
op.alter_column('subscriptions', 'manual_payment', nullable=False)
op.alter_column('subscriptions', 'donation_cents', nullable=False)
op.alter_column('subscriptions', 'base_subscription_cents', nullable=False)
print(" ✓ Fixed nullable constraints")
# ============================================================
# 4. FIX STORAGE_USAGE TABLE
# ============================================================
print("\n[4/14] Fixing storage_usage table...")
storage_columns = {col['name'] for col in inspector.get_columns('storage_usage')}
# Remove PROD-only columns
if 'created_at' in storage_columns:
op.drop_column('storage_usage', 'created_at')
print(" ✓ Removed storage_usage.created_at (PROD-only)")
if 'updated_at' in storage_columns:
op.drop_column('storage_usage', 'updated_at')
print(" ✓ Removed storage_usage.updated_at (PROD-only)")
op.alter_column('storage_usage', 'max_bytes_allowed', nullable=False)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 5. FIX EVENT_GALLERIES TABLE (Add missing DEV columns)
# ============================================================
print("\n[5/14] Fixing event_galleries table...")
event_galleries_columns = {col['name'] for col in inspector.get_columns('event_galleries')}
# Add DEV-only columns (exist in models.py but not in PROD)
if 'image_key' not in event_galleries_columns:
op.add_column('event_galleries', sa.Column('image_key', sa.String(), nullable=False, server_default=''))
print(" ✓ Added event_galleries.image_key")
if 'file_size_bytes' not in event_galleries_columns:
op.add_column('event_galleries', sa.Column('file_size_bytes', sa.Integer(), nullable=False, server_default='0'))
print(" ✓ Added event_galleries.file_size_bytes")
try:
op.alter_column('event_galleries', 'image_url', type_=sa.String(), postgresql_using='image_url::varchar')
print(" ✓ Changed TEXT to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: Type conversion failed: {e}")
# Note: uploaded_by column already has correct nullable=False in both DEV and PROD
# ============================================================
# 6. FIX BYLAWS_DOCUMENTS TABLE
# ============================================================
print("\n[6/14] Fixing bylaws_documents table...")
bylaws_columns = {col['name'] for col in inspector.get_columns('bylaws_documents')}
# Remove PROD-only column
if 'updated_at' in bylaws_columns:
op.drop_column('bylaws_documents', 'updated_at')
print(" ✓ Removed bylaws_documents.updated_at (PROD-only)")
try:
op.alter_column('bylaws_documents', 'title', type_=sa.String(), postgresql_using='title::varchar')
op.alter_column('bylaws_documents', 'version', type_=sa.String(), postgresql_using='version::varchar')
op.alter_column('bylaws_documents', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
op.alter_column('bylaws_documents', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
print(" ✓ Changed column types")
except Exception as e:
print(f" ⚠️ Warning: Type conversion failed: {e}")
op.alter_column('bylaws_documents', 'document_type', nullable=True)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 7. FIX EVENTS TABLE
# ============================================================
print("\n[7/14] Fixing events table...")
try:
op.alter_column('events', 'title', type_=sa.String(), postgresql_using='title::varchar')
op.alter_column('events', 'location', type_=sa.String(), postgresql_using='location::varchar')
op.alter_column('events', 'calendar_uid', type_=sa.String(), postgresql_using='calendar_uid::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('events', 'location', nullable=False)
op.alter_column('events', 'created_by', nullable=False)
print(" ✓ Fixed nullable constraints")
# ============================================================
# 8. FIX PERMISSIONS TABLE
# ============================================================
print("\n[8/14] Fixing permissions table...")
try:
op.alter_column('permissions', 'code', type_=sa.String(), postgresql_using='code::varchar')
op.alter_column('permissions', 'name', type_=sa.String(), postgresql_using='name::varchar')
op.alter_column('permissions', 'module', type_=sa.String(), postgresql_using='module::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('permissions', 'module', nullable=False)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 9. FIX ROLES TABLE
# ============================================================
print("\n[9/14] Fixing roles table...")
try:
op.alter_column('roles', 'code', type_=sa.String(), postgresql_using='code::varchar')
op.alter_column('roles', 'name', type_=sa.String(), postgresql_using='name::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('roles', 'is_system_role', nullable=False)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 10. FIX USER_INVITATIONS TABLE
# ============================================================
print("\n[10/14] Fixing user_invitations table...")
try:
op.alter_column('user_invitations', 'email', type_=sa.String(), postgresql_using='email::varchar')
op.alter_column('user_invitations', 'token', type_=sa.String(), postgresql_using='token::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('user_invitations', 'invited_at', nullable=False)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 11. FIX NEWSLETTER_ARCHIVES TABLE
# ============================================================
print("\n[11/14] Fixing newsletter_archives table...")
try:
op.alter_column('newsletter_archives', 'title', type_=sa.String(), postgresql_using='title::varchar')
op.alter_column('newsletter_archives', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
op.alter_column('newsletter_archives', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
print(" ✓ Changed column types")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('newsletter_archives', 'document_type', nullable=True)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 12. FIX FINANCIAL_REPORTS TABLE
# ============================================================
print("\n[12/14] Fixing financial_reports table...")
try:
op.alter_column('financial_reports', 'title', type_=sa.String(), postgresql_using='title::varchar')
op.alter_column('financial_reports', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
op.alter_column('financial_reports', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
print(" ✓ Changed column types")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('financial_reports', 'document_type', nullable=True)
print(" ✓ Fixed nullable constraint")
# ============================================================
# 13. FIX IMPORT_JOBS TABLE
# ============================================================
print("\n[13/14] Fixing import_jobs table...")
try:
op.alter_column('import_jobs', 'filename', type_=sa.String(), postgresql_using='filename::varchar')
op.alter_column('import_jobs', 'file_key', type_=sa.String(), postgresql_using='file_key::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
# Change JSONB to JSON
op.alter_column('import_jobs', 'errors', type_=JSON(), postgresql_using='errors::json')
print(" ✓ Changed errors JSONB to JSON")
except Exception as e:
print(f" ⚠️ Warning: {e}")
# Fix nullable constraints
op.alter_column('import_jobs', 'processed_rows', nullable=False)
op.alter_column('import_jobs', 'successful_rows', nullable=False)
op.alter_column('import_jobs', 'failed_rows', nullable=False)
op.alter_column('import_jobs', 'errors', nullable=False)
op.alter_column('import_jobs', 'started_at', nullable=False)
print(" ✓ Fixed nullable constraints")
# ============================================================
# 14. FIX SUBSCRIPTION_PLANS TABLE
# ============================================================
print("\n[14/14] Fixing subscription_plans table...")
try:
op.alter_column('subscription_plans', 'name', type_=sa.String(), postgresql_using='name::varchar')
op.alter_column('subscription_plans', 'billing_cycle', type_=sa.String(), postgresql_using='billing_cycle::varchar')
op.alter_column('subscription_plans', 'stripe_price_id', type_=sa.String(), postgresql_using='stripe_price_id::varchar')
print(" ✓ Changed VARCHAR(n) to VARCHAR")
except Exception as e:
print(f" ⚠️ Warning: {e}")
op.alter_column('subscription_plans', 'minimum_price_cents', nullable=False)
print(" ✓ Fixed nullable constraint")
print("\n✅ Schema alignment complete! PROD now matches DEV (source of truth)")
def downgrade() -> None:
"""Revert alignment changes (not recommended)"""
print("⚠️ Downgrade not supported for alignment migration")
print(" To revert, restore from backup")
pass

View File

@@ -1,170 +0,0 @@
"""fix_remaining_differences
Revision ID: 012_fix_remaining
Revises: 011_align_prod_dev
Create Date: 2026-01-05
Fixes the last 5 schema differences found after migration 011:
1-2. import_rollback_audit nullable constraints (PROD)
3-4. role_permissions type and nullable (PROD)
5. UserStatus enum values (DEV - remove deprecated values)
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import ENUM
# revision identifiers, used by Alembic.
revision: str = '012_fix_remaining'
down_revision: Union[str, None] = '011_align_prod_dev'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Fix remaining schema differences"""
from sqlalchemy import inspect
conn = op.get_bind()
inspector = inspect(conn)
print("Fixing remaining schema differences...")
# ============================================================
# 1. FIX IMPORT_ROLLBACK_AUDIT TABLE (PROD only)
# ============================================================
print("\n[1/3] Fixing import_rollback_audit nullable constraints...")
# Check if there are any NULL values first
try:
null_count = conn.execute(sa.text("""
SELECT COUNT(*) FROM import_rollback_audit
WHERE created_at IS NULL OR rolled_back_at IS NULL
""")).scalar()
if null_count > 0:
# Fill NULLs with current timestamp
conn.execute(sa.text("""
UPDATE import_rollback_audit
SET created_at = NOW() WHERE created_at IS NULL
"""))
conn.execute(sa.text("""
UPDATE import_rollback_audit
SET rolled_back_at = NOW() WHERE rolled_back_at IS NULL
"""))
print(f" ✓ Filled {null_count} NULL timestamps")
# Now set NOT NULL
op.alter_column('import_rollback_audit', 'created_at', nullable=False)
op.alter_column('import_rollback_audit', 'rolled_back_at', nullable=False)
print(" ✓ Set NOT NULL constraints")
except Exception as e:
print(f" ⚠️ Warning: {e}")
# ============================================================
# 2. FIX ROLE_PERMISSIONS TABLE (PROD only)
# ============================================================
print("\n[2/3] Fixing role_permissions.role type and nullable...")
try:
# Change VARCHAR(50) to VARCHAR(10) to match UserRole enum
op.alter_column('role_permissions', 'role',
type_=sa.String(10),
postgresql_using='role::varchar(10)')
print(" ✓ Changed VARCHAR(50) to VARCHAR(10)")
# Set NOT NULL
op.alter_column('role_permissions', 'role', nullable=False)
print(" ✓ Set NOT NULL constraint")
except Exception as e:
print(f" ⚠️ Warning: {e}")
# ============================================================
# 3. FIX USERSTATUS ENUM (DEV only - remove deprecated values)
# ============================================================
print("\n[3/3] Fixing UserStatus enum values...")
try:
# First, check if the enum has deprecated values
enum_values = conn.execute(sa.text("""
SELECT enumlabel
FROM pg_enum
WHERE enumtypid = (
SELECT oid FROM pg_type WHERE typname = 'userstatus'
)
""")).fetchall()
enum_values_list = [row[0] for row in enum_values]
has_deprecated = 'pending_approval' in enum_values_list or 'pre_approved' in enum_values_list
if not has_deprecated:
print(" ✓ UserStatus enum already correct (no deprecated values)")
else:
print(" ⏳ Found deprecated enum values, migrating...")
# Check if any users have deprecated status values
deprecated_count = conn.execute(sa.text("""
SELECT COUNT(*) FROM users
WHERE status IN ('pending_approval', 'pre_approved')
""")).scalar()
if deprecated_count > 0:
print(f" ⏳ Migrating {deprecated_count} users with deprecated status values...")
# Migrate deprecated values to new equivalents
conn.execute(sa.text("""
UPDATE users
SET status = 'pre_validated'
WHERE status = 'pre_approved'
"""))
conn.execute(sa.text("""
UPDATE users
SET status = 'payment_pending'
WHERE status = 'pending_approval'
"""))
print(" ✓ Migrated deprecated status values")
else:
print(" ✓ No users with deprecated status values")
# Now remove deprecated enum values
# PostgreSQL doesn't support removing enum values directly,
# so we need to recreate the enum
conn.execute(sa.text("""
-- Create new enum with correct values (matches models.py)
CREATE TYPE userstatus_new AS ENUM (
'pending_email',
'pending_validation',
'pre_validated',
'payment_pending',
'active',
'inactive',
'canceled',
'expired',
'rejected',
'abandoned'
);
-- Update column to use new enum
ALTER TABLE users
ALTER COLUMN status TYPE userstatus_new
USING status::text::userstatus_new;
-- Drop old enum and rename new one
DROP TYPE userstatus;
ALTER TYPE userstatus_new RENAME TO userstatus;
"""))
print(" ✓ Updated UserStatus enum (removed deprecated values)")
except Exception as e:
print(f" ⚠️ Warning: Enum update failed (may already be correct): {e}")
print("\n✅ All remaining differences fixed!")
def downgrade() -> None:
"""Revert fixes (not recommended)"""
print("⚠️ Downgrade not supported")
pass

View File

@@ -1,147 +0,0 @@
"""sync_role_permissions
Revision ID: 013_sync_permissions
Revises: 012_fix_remaining
Create Date: 2026-01-05
Syncs role_permissions between DEV and PROD bidirectionally.
- Adds 18 DEV-only permissions to PROD (new features)
- Adds 6 PROD-only permissions to DEV (operational/security)
Result: Both environments have identical 142 permission mappings
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '013_sync_permissions'
down_revision: Union[str, None] = '012_fix_remaining'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Sync role_permissions bidirectionally"""
from sqlalchemy import text
conn = op.get_bind()
print("Syncing role_permissions between environments...")
# ============================================================
# STEP 1: Add missing permissions to ensure all exist
# ============================================================
print("\n[1/2] Ensuring all permissions exist...")
# Permissions that should exist (union of both environments)
all_permissions = [
# From DEV-only list
('donations.export', 'Export Donations', 'donations'),
('donations.view', 'View Donations', 'donations'),
('financials.create', 'Create Financial Reports', 'financials'),
('financials.delete', 'Delete Financial Reports', 'financials'),
('financials.edit', 'Edit Financial Reports', 'financials'),
('financials.export', 'Export Financial Reports', 'financials'),
('financials.payments', 'Manage Financial Payments', 'financials'),
('settings.edit', 'Edit Settings', 'settings'),
('settings.email_templates', 'Manage Email Templates', 'settings'),
('subscriptions.activate', 'Activate Subscriptions', 'subscriptions'),
('subscriptions.cancel', 'Cancel Subscriptions', 'subscriptions'),
('subscriptions.create', 'Create Subscriptions', 'subscriptions'),
('subscriptions.edit', 'Edit Subscriptions', 'subscriptions'),
('subscriptions.export', 'Export Subscriptions', 'subscriptions'),
('subscriptions.plans', 'Manage Subscription Plans', 'subscriptions'),
('subscriptions.view', 'View Subscriptions', 'subscriptions'),
('events.calendar_export', 'Export Event Calendar', 'events'),
('events.rsvps', 'View Event RSVPs', 'events'),
# From PROD-only list
('permissions.audit', 'Audit Permissions', 'permissions'),
('permissions.view', 'View Permissions', 'permissions'),
('settings.backup', 'Manage Backups', 'settings'),
]
for code, name, module in all_permissions:
# Insert if not exists
conn.execute(text(f"""
INSERT INTO permissions (id, code, name, description, module, created_at)
SELECT
gen_random_uuid(),
'{code}',
'{name}',
'{name}',
'{module}',
NOW()
WHERE NOT EXISTS (
SELECT 1 FROM permissions WHERE code = '{code}'
)
"""))
print(" ✓ Ensured all permissions exist")
# ============================================================
# STEP 2: Add missing role-permission mappings
# ============================================================
print("\n[2/2] Adding missing role-permission mappings...")
# Mappings that should exist (union of both environments)
role_permission_mappings = [
# DEV-only (add to PROD)
('admin', 'donations.export'),
('admin', 'donations.view'),
('admin', 'financials.create'),
('admin', 'financials.delete'),
('admin', 'financials.edit'),
('admin', 'financials.export'),
('admin', 'financials.payments'),
('admin', 'settings.edit'),
('admin', 'settings.email_templates'),
('admin', 'subscriptions.activate'),
('admin', 'subscriptions.cancel'),
('admin', 'subscriptions.create'),
('admin', 'subscriptions.edit'),
('admin', 'subscriptions.export'),
('admin', 'subscriptions.plans'),
('admin', 'subscriptions.view'),
('member', 'events.calendar_export'),
('member', 'events.rsvps'),
# PROD-only (add to DEV)
('admin', 'permissions.audit'),
('admin', 'permissions.view'),
('admin', 'settings.backup'),
('finance', 'bylaws.view'),
('finance', 'events.view'),
('finance', 'newsletters.view'),
]
added_count = 0
for role, perm_code in role_permission_mappings:
result = conn.execute(text(f"""
INSERT INTO role_permissions (id, role, permission_id, created_at)
SELECT
gen_random_uuid(),
'{role}',
p.id,
NOW()
FROM permissions p
WHERE p.code = '{perm_code}'
AND NOT EXISTS (
SELECT 1 FROM role_permissions rp
WHERE rp.role = '{role}'
AND rp.permission_id = p.id
)
RETURNING id
"""))
if result.rowcount > 0:
added_count += 1
print(f" ✓ Added {added_count} missing role-permission mappings")
# Verify final count
final_count = conn.execute(text("SELECT COUNT(*) FROM role_permissions")).scalar()
print(f"\n✅ Role-permission mappings synchronized: {final_count} total")
def downgrade() -> None:
"""Revert sync (not recommended)"""
print("⚠️ Downgrade not supported - permissions are additive")
pass

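The sync migration above interpolates values into its INSERT ... WHERE NOT EXISTS statements with f-strings. The values here are static tuples, so this works, but the same insert-if-absent statement can be written once with bound parameters, which sidesteps quoting pitfalls entirely. A sketch of that variant, not the repository's code:

from sqlalchemy import text

stmt = text("""
    INSERT INTO permissions (id, code, name, description, module, created_at)
    SELECT gen_random_uuid(), :code, :name, :name, :module, NOW()
    WHERE NOT EXISTS (SELECT 1 FROM permissions WHERE code = :code)
""")
for code, name, module in all_permissions:
    # Named parameters are bound per row; the statement text never changes
    conn.execute(stmt, {"code": code, "name": name, "module": module})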
View File

@@ -1,48 +0,0 @@
"""add_role_audit_fields
Revision ID: 4fa11836f7fd
Revises: 013_sync_permissions
Create Date: 2026-01-16 17:21:40.514605
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID
# revision identifiers, used by Alembic.
revision: str = '4fa11836f7fd'
down_revision: Union[str, None] = '013_sync_permissions'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Add role audit trail columns
op.add_column('users', sa.Column('role_changed_at', sa.DateTime(timezone=True), nullable=True))
op.add_column('users', sa.Column('role_changed_by', UUID(as_uuid=True), nullable=True))
# Create foreign key constraint to track who changed the role
op.create_foreign_key(
'fk_users_role_changed_by',
'users', 'users',
['role_changed_by'], ['id'],
ondelete='SET NULL'
)
# Create index for efficient querying by role change date
op.create_index('idx_users_role_changed_at', 'users', ['role_changed_at'])
def downgrade() -> None:
# Drop index first
op.drop_index('idx_users_role_changed_at')
# Drop foreign key constraint
op.drop_constraint('fk_users_role_changed_by', 'users', type_='foreignkey')
# Drop columns
op.drop_column('users', 'role_changed_by')
op.drop_column('users', 'role_changed_at')

View File

@@ -1,68 +0,0 @@
"""add_system_settings_table
Revision ID: ec4cb4a49cde
Revises: 4fa11836f7fd
Create Date: 2026-01-16 18:16:00.283455
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID
# revision identifiers, used by Alembic.
revision: str = 'ec4cb4a49cde'
down_revision: Union[str, None] = '4fa11836f7fd'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Create enum for setting types (only if not exists)
op.execute("""
DO $$ BEGIN
CREATE TYPE settingtype AS ENUM ('plaintext', 'encrypted', 'json');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
""")
# Create system_settings table
op.execute("""
CREATE TABLE system_settings (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
setting_key VARCHAR(100) UNIQUE NOT NULL,
setting_value TEXT,
setting_type settingtype NOT NULL DEFAULT 'plaintext'::settingtype,
description TEXT,
updated_by UUID REFERENCES users(id) ON DELETE SET NULL,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
is_sensitive BOOLEAN NOT NULL DEFAULT FALSE
);
COMMENT ON COLUMN system_settings.setting_key IS 'Unique setting identifier (e.g., stripe_secret_key)';
COMMENT ON COLUMN system_settings.setting_value IS 'Setting value (encrypted if setting_type is encrypted)';
COMMENT ON COLUMN system_settings.setting_type IS 'Type of setting: plaintext, encrypted, or json';
COMMENT ON COLUMN system_settings.description IS 'Human-readable description of the setting';
COMMENT ON COLUMN system_settings.updated_by IS 'User who last updated this setting';
COMMENT ON COLUMN system_settings.is_sensitive IS 'Whether this setting contains sensitive data';
""")
# Create indexes
op.create_index('idx_system_settings_key', 'system_settings', ['setting_key'])
op.create_index('idx_system_settings_updated_at', 'system_settings', ['updated_at'])
def downgrade() -> None:
# Drop indexes
op.drop_index('idx_system_settings_updated_at')
op.drop_index('idx_system_settings_key')
# Drop table
op.drop_table('system_settings')
# Drop enum
op.execute('DROP TYPE IF EXISTS settingtype')

View File

@@ -1,345 +0,0 @@
#!/usr/bin/env python3
"""
Database Integrity Checker
Compares schema and data integrity between development and production databases
"""
import sys
from sqlalchemy import create_engine, inspect, text
from sqlalchemy.engine import reflection
import json
from collections import defaultdict

# Database URLs
DEV_DB = "postgresql://postgres:RchhcpaUKZuZuMOvB5kwCP1weLBnAG6tNMXE5FHdk8AwCvolBMALYFVYRM7WCl9x@10.9.23.11:5001/membership_demo"
PROD_DB = "postgresql://postgres:fDv3fRvMgfPueDWDUxj27NJVaynsewIdh6b2Hb28tcvG3Ew6mhscASg2kulx4tr7@10.9.23.11:54321/loaf_new"


def get_db_info(engine, label):
    """Get comprehensive database information"""
    inspector = inspect(engine)
    info = {
        'label': label,
        'tables': {},
        'indexes': {},
        'foreign_keys': {},
        'sequences': [],
        'enums': []
    }

    # Get all table names
    table_names = inspector.get_table_names()
    for table_name in table_names:
        # Get columns
        columns = inspector.get_columns(table_name)
        info['tables'][table_name] = {
            'columns': {
                col['name']: {
                    'type': str(col['type']),
                    'nullable': col['nullable'],
                    'default': str(col.get('default', None)),
                    'autoincrement': col.get('autoincrement', False)
                }
                for col in columns
            },
            'column_count': len(columns)
        }

        # Get primary keys
        pk = inspector.get_pk_constraint(table_name)
        info['tables'][table_name]['primary_key'] = pk.get('constrained_columns', [])

        # Get indexes
        indexes = inspector.get_indexes(table_name)
        info['indexes'][table_name] = [
            {
                'name': idx['name'],
                'columns': idx['column_names'],
                'unique': idx['unique']
            }
            for idx in indexes
        ]

        # Get foreign keys
        fks = inspector.get_foreign_keys(table_name)
        info['foreign_keys'][table_name] = [
            {
                'name': fk.get('name'),
                'columns': fk['constrained_columns'],
                'referred_table': fk['referred_table'],
                'referred_columns': fk['referred_columns']
            }
            for fk in fks
        ]

    # Get sequences
    with engine.connect() as conn:
        result = conn.execute(text("""
            SELECT sequence_name
            FROM information_schema.sequences
            WHERE sequence_schema = 'public'
        """))
        info['sequences'] = [row[0] for row in result]

        # Get enum types
        result = conn.execute(text("""
            SELECT t.typname as enum_name,
                   array_agg(e.enumlabel ORDER BY e.enumsortorder) as enum_values
            FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            WHERE t.typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'public')
            GROUP BY t.typname
        """))
        info['enums'] = {row[0]: row[1] for row in result}

    return info


def compare_tables(dev_info, prod_info):
    """Compare tables between databases"""
    dev_tables = set(dev_info['tables'].keys())
    prod_tables = set(prod_info['tables'].keys())

    print("\n" + "="*80)
    print("TABLE COMPARISON")
    print("="*80)

    # Tables only in dev
    dev_only = dev_tables - prod_tables
    if dev_only:
        print(f"\n❌ Tables only in DEV ({len(dev_only)}):")
        for table in sorted(dev_only):
            print(f" - {table}")

    # Tables only in prod
    prod_only = prod_tables - dev_tables
    if prod_only:
        print(f"\n❌ Tables only in PROD ({len(prod_only)}):")
        for table in sorted(prod_only):
            print(f" - {table}")

    # Common tables
    common = dev_tables & prod_tables
    print(f"\n✅ Common tables: {len(common)}")
    return common


def compare_columns(dev_info, prod_info, common_tables):
    """Compare columns for common tables"""
    print("\n" + "="*80)
    print("COLUMN COMPARISON")
    print("="*80)

    issues = []
    for table in sorted(common_tables):
        dev_cols = set(dev_info['tables'][table]['columns'].keys())
        prod_cols = set(prod_info['tables'][table]['columns'].keys())

        dev_only = dev_cols - prod_cols
        prod_only = prod_cols - dev_cols
        if dev_only or prod_only:
            print(f"\n⚠️ Table '{table}' has column differences:")
            if dev_only:
                print(f" Columns only in DEV: {', '.join(sorted(dev_only))}")
                issues.append(f"{table}: DEV-only columns: {', '.join(dev_only)}")
            if prod_only:
                print(f" Columns only in PROD: {', '.join(sorted(prod_only))}")
                issues.append(f"{table}: PROD-only columns: {', '.join(prod_only)}")

        # Compare column types for common columns
        common_cols = dev_cols & prod_cols
        for col in common_cols:
            dev_col = dev_info['tables'][table]['columns'][col]
            prod_col = prod_info['tables'][table]['columns'][col]
            if dev_col['type'] != prod_col['type']:
                print(f" ⚠️ Column '{col}' type mismatch:")
                print(f"   DEV: {dev_col['type']}")
                print(f"   PROD: {prod_col['type']}")
                issues.append(f"{table}.{col}: Type mismatch")
            if dev_col['nullable'] != prod_col['nullable']:
                print(f" ⚠️ Column '{col}' nullable mismatch:")
                print(f"   DEV: {dev_col['nullable']}")
                print(f"   PROD: {prod_col['nullable']}")
                issues.append(f"{table}.{col}: Nullable mismatch")

    if not issues:
        print("\n✅ All columns match between DEV and PROD")
    return issues


def compare_enums(dev_info, prod_info):
    """Compare enum types"""
    print("\n" + "="*80)
    print("ENUM TYPE COMPARISON")
    print("="*80)

    dev_enums = set(dev_info['enums'].keys())
    prod_enums = set(prod_info['enums'].keys())

    dev_only = dev_enums - prod_enums
    prod_only = prod_enums - dev_enums

    issues = []
    if dev_only:
        print(f"\n❌ Enums only in DEV: {', '.join(sorted(dev_only))}")
        issues.extend([f"Enum '{e}' only in DEV" for e in dev_only])
    if prod_only:
        print(f"\n❌ Enums only in PROD: {', '.join(sorted(prod_only))}")
        issues.extend([f"Enum '{e}' only in PROD" for e in prod_only])

    # Compare enum values for common enums
    common = dev_enums & prod_enums
    for enum_name in sorted(common):
        dev_values = set(dev_info['enums'][enum_name])
        prod_values = set(prod_info['enums'][enum_name])
        if dev_values != prod_values:
            print(f"\n⚠️ Enum '{enum_name}' values differ:")
            print(f" DEV: {', '.join(sorted(dev_values))}")
            print(f" PROD: {', '.join(sorted(prod_values))}")
            issues.append(f"Enum '{enum_name}' values differ")

    if not issues:
        print("\n✅ All enum types match")
    return issues


def check_migration_history(dev_engine, prod_engine):
    """Check Alembic migration history"""
    print("\n" + "="*80)
    print("MIGRATION HISTORY")
    print("="*80)

    try:
        with dev_engine.connect() as dev_conn:
            dev_result = dev_conn.execute(text("SELECT version_num FROM alembic_version"))
            dev_version = dev_result.fetchone()
            dev_version = dev_version[0] if dev_version else None

        with prod_engine.connect() as prod_conn:
            prod_result = prod_conn.execute(text("SELECT version_num FROM alembic_version"))
            prod_version = prod_result.fetchone()
            prod_version = prod_version[0] if prod_version else None

        print(f"\nDEV migration version: {dev_version}")
        print(f"PROD migration version: {prod_version}")

        if dev_version == prod_version:
            print("✅ Migration versions match")
            return []
        else:
            print("❌ Migration versions DO NOT match")
            return ["Migration versions differ"]
    except Exception as e:
        print(f"⚠️ Could not check migration history: {str(e)}")
        return [f"Migration check failed: {str(e)}"]


def get_row_counts(engine, tables):
    """Get row counts for all tables"""
    counts = {}
    with engine.connect() as conn:
        for table in tables:
            result = conn.execute(text(f"SELECT COUNT(*) FROM {table}"))
            counts[table] = result.fetchone()[0]
    return counts


def compare_data_counts(dev_engine, prod_engine, common_tables):
    """Compare row counts between databases"""
    print("\n" + "="*80)
    print("DATA ROW COUNTS")
    print("="*80)

    print("\nGetting DEV row counts...")
    dev_counts = get_row_counts(dev_engine, common_tables)
    print("Getting PROD row counts...")
    prod_counts = get_row_counts(prod_engine, common_tables)

    print(f"\n{'Table':<30} {'DEV':<15} {'PROD':<15} {'Diff':<15}")
    print("-" * 75)
    for table in sorted(common_tables):
        dev_count = dev_counts[table]
        prod_count = prod_counts[table]
        diff = dev_count - prod_count
        diff_str = f"+{diff}" if diff > 0 else str(diff)
        status = "⚠️ " if abs(diff) > 0 else ""
        print(f"{status} {table:<28} {dev_count:<15} {prod_count:<15} {diff_str:<15}")


def main():
    print("\n" + "="*80)
    print("DATABASE INTEGRITY CHECKER")
    print("="*80)
    print(f"\nDEV: {DEV_DB.split('@')[1]}")  # Hide password
    print(f"PROD: {PROD_DB.split('@')[1]}")

    try:
        # Connect to databases
        print("\n🔌 Connecting to databases...")
        dev_engine = create_engine(DEV_DB)
        prod_engine = create_engine(PROD_DB)

        # Test connections
        with dev_engine.connect() as conn:
            conn.execute(text("SELECT 1"))
        print("✅ Connected to DEV database")
        with prod_engine.connect() as conn:
            conn.execute(text("SELECT 1"))
        print("✅ Connected to PROD database")

        # Get database info
        print("\n📊 Gathering database information...")
        dev_info = get_db_info(dev_engine, "DEV")
        prod_info = get_db_info(prod_engine, "PROD")

        # Run comparisons
        all_issues = []
        common_tables = compare_tables(dev_info, prod_info)

        column_issues = compare_columns(dev_info, prod_info, common_tables)
        all_issues.extend(column_issues)

        enum_issues = compare_enums(dev_info, prod_info)
        all_issues.extend(enum_issues)

        migration_issues = check_migration_history(dev_engine, prod_engine)
        all_issues.extend(migration_issues)

        compare_data_counts(dev_engine, prod_engine, common_tables)

        # Summary
        print("\n" + "="*80)
        print("SUMMARY")
        print("="*80)
        if all_issues:
            print(f"\n❌ Found {len(all_issues)} integrity issues:")
            for i, issue in enumerate(all_issues, 1):
                print(f" {i}. {issue}")
            print("\n⚠️ Databases are NOT in sync!")
            sys.exit(1)
        else:
            print("\n✅ Databases are in sync!")
            print("✅ No integrity issues found")
            sys.exit(0)
    except Exception as e:
        print(f"\n❌ Error: {str(e)}")
        import traceback
        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    main()

View File

@@ -1,7 +1,6 @@
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import QueuePool
import os
from dotenv import load_dotenv
from pathlib import Path
@@ -11,21 +10,7 @@ load_dotenv(ROOT_DIR / '.env')
DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://user:password@localhost:5432/membership_db')
# Configure engine with connection pooling and connection health checks
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=5, # Keep 5 connections open
max_overflow=10, # Allow up to 10 extra connections during peak
pool_pre_ping=True, # CRITICAL: Test connections before using them
pool_recycle=3600, # Recycle connections every hour (prevents stale connections)
echo=False, # Set to True for SQL debugging
connect_args={
'connect_timeout': 10, # Timeout connection attempts after 10 seconds
'options': '-c statement_timeout=30000' # 30 second query timeout
}
)
engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
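# A minimal sketch of the request-scoped session dependency that server.py's
# `Depends(get_db)` calls imply (assumed to live in this module; not shown in the diff):
def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()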

View File

@@ -1,14 +0,0 @@
services:
backend:
build:
context: .
dockerfile: Dockerfile # Use Dockerfile.prod for production
ports:
- "8000:8000"
env_file:
- .env
environment:
DATABASE_URL: ${DATABASE_URL}
volumes:
- .:/app # sync code for hot reload

View File

@@ -1,122 +0,0 @@
"""
Encryption service for sensitive settings stored in the database.
Uses Fernet symmetric encryption (AES-128 in CBC mode with HMAC authentication).
The encryption key is derived from a master secret stored in .env.
"""
import os
import base64
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from cryptography.hazmat.backends import default_backend
class EncryptionService:
"""Service for encrypting and decrypting sensitive configuration values"""
def __init__(self):
# Get master encryption key from environment
# This should be a long, random string (e.g., 64 characters)
# Generate one with: python -c "import secrets; print(secrets.token_urlsafe(64))"
self.master_secret = os.environ.get('SETTINGS_ENCRYPTION_KEY')
if not self.master_secret:
raise ValueError(
"SETTINGS_ENCRYPTION_KEY environment variable not set. "
"Generate one with: python -c \"import secrets; print(secrets.token_urlsafe(64))\""
)
# Derive encryption key from master secret using PBKDF2HMAC
# This adds an extra layer of security
kdf = PBKDF2HMAC(
algorithm=hashes.SHA256(),
length=32,
salt=b'systemsettings', # Fixed salt; acceptable only because the master secret is long and random (a per-install salt would be stronger)
iterations=100000,
backend=default_backend()
)
key = base64.urlsafe_b64encode(kdf.derive(self.master_secret.encode()))
self.cipher = Fernet(key)
def encrypt(self, plaintext: str) -> str:
"""
Encrypt a plaintext string.
Args:
plaintext: The string to encrypt
Returns:
Base64-encoded encrypted string
"""
if not plaintext:
return ""
encrypted_bytes = self.cipher.encrypt(plaintext.encode())
return encrypted_bytes.decode('utf-8')
def decrypt(self, encrypted: str) -> str:
"""
Decrypt an encrypted string.
Args:
encrypted: The base64-encoded encrypted string
Returns:
Decrypted plaintext string
Raises:
cryptography.fernet.InvalidToken: If decryption fails (wrong key or corrupted data)
"""
if not encrypted:
return ""
decrypted_bytes = self.cipher.decrypt(encrypted.encode())
return decrypted_bytes.decode('utf-8')
def is_encrypted(self, value: str) -> bool:
"""
Check if a value appears to be encrypted (starts with Fernet token format).
This is a heuristic check - not 100% reliable but useful for validation.
Args:
value: String to check
Returns:
True if value looks like a Fernet token
"""
if not value:
return False
# Fernet tokens are base64-encoded and start with version byte (gAAAAA...)
# They're always > 60 characters
try:
return len(value) > 60 and value.startswith('gAAAAA')
except Exception:
return False
# Global encryption service instance
# Initialize on module import so it fails fast if encryption key is missing
try:
encryption_service = EncryptionService()
except ValueError as e:
print(f"WARNING: {e}")
print("Encryption service will not be available.")
encryption_service = None
def get_encryption_service() -> EncryptionService:
"""
Get the global encryption service instance.
Raises:
ValueError: If encryption service is not initialized (missing SETTINGS_ENCRYPTION_KEY)
"""
if encryption_service is None:
raise ValueError(
"Encryption service not initialized. Set SETTINGS_ENCRYPTION_KEY environment variable."
)
return encryption_service
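# Usage sketch (illustrative value, assuming SETTINGS_ENCRYPTION_KEY is exported):
#
#   svc = get_encryption_service()
#   token = svc.encrypt("sk_test_abc123")     # -> Fernet token ("gAAAAA...")
#   assert svc.is_encrypted(token)
#   assert svc.decrypt(token) == "sk_test_abc123"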

View File

@@ -137,10 +137,6 @@ class User(Base):
wordpress_user_id = Column(BigInteger, nullable=True, comment="Original WordPress user ID")
wordpress_registered_date = Column(DateTime(timezone=True), nullable=True, comment="Original WordPress registration date")
# Role Change Audit Trail
role_changed_at = Column(DateTime(timezone=True), nullable=True, comment="Timestamp when role was last changed")
role_changed_by = Column(UUID(as_uuid=True), ForeignKey('users.id', ondelete='SET NULL'), nullable=True, comment="Admin who changed the role")
created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
@@ -149,7 +145,6 @@ class User(Base):
events_created = relationship("Event", back_populates="creator")
rsvps = relationship("EventRSVP", back_populates="user")
subscriptions = relationship("Subscription", back_populates="user", foreign_keys="Subscription.user_id")
role_changer = relationship("User", foreign_keys=[role_changed_by], remote_side="User.id", post_update=True)
class Event(Base):
__tablename__ = "events"
@@ -514,36 +509,3 @@ class ImportRollbackAudit(Base):
# Relationships
import_job = relationship("ImportJob")
admin_user = relationship("User", foreign_keys=[rolled_back_by])
# ============================================================
# System Settings Models
# ============================================================
class SettingType(enum.Enum):
plaintext = "plaintext"
encrypted = "encrypted"
json = "json"
class SystemSettings(Base):
"""System-wide configuration settings stored in database"""
__tablename__ = "system_settings"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
setting_key = Column(String(100), unique=True, nullable=False, index=True)
setting_value = Column(Text, nullable=True)
setting_type = Column(SQLEnum(SettingType), default=SettingType.plaintext, nullable=False)
description = Column(Text, nullable=True)
updated_by = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True)
created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False)
updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)
is_sensitive = Column(Boolean, default=False, nullable=False)
# Relationships
updater = relationship("User", foreign_keys=[updated_by])
# Index on updated_at for audit queries
__table_args__ = (
Index('idx_system_settings_updated_at', 'updated_at'),
)

View File

@@ -11,9 +11,11 @@ from datetime import datetime, timezone, timedelta
# Load environment variables
load_dotenv()
# NOTE: Stripe credentials are now database-driven
# These .env fallbacks are kept for backward compatibility only
# The actual credentials are loaded dynamically from the system_settings table
# Initialize Stripe with secret key
stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
# Stripe webhook secret for signature verification
STRIPE_WEBHOOK_SECRET = os.getenv("STRIPE_WEBHOOK_SECRET")
def create_checkout_session(
user_id: str,
@@ -21,15 +23,11 @@ def create_checkout_session(
plan_id: str,
stripe_price_id: str,
success_url: str,
cancel_url: str,
db = None
cancel_url: str
):
"""
Create a Stripe Checkout session for subscription payment.
Args:
db: Database session (optional, for reading Stripe credentials from database)
Args:
user_id: User's UUID
user_email: User's email address
@@ -41,28 +39,6 @@ def create_checkout_session(
Returns:
dict: Checkout session object with session ID and URL
"""
# Load Stripe API key from database if available
if db:
try:
# Import here to avoid circular dependency
from models import SystemSettings, SettingType
from encryption_service import get_encryption_service
setting = db.query(SystemSettings).filter(
SystemSettings.setting_key == 'stripe_secret_key'
).first()
if setting and setting.setting_value:
encryption_service = get_encryption_service()
stripe.api_key = encryption_service.decrypt(setting.setting_value)
except Exception as e:
# Fallback to .env if database read fails
print(f"Failed to read Stripe key from database: {e}")
stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
else:
# Fallback to .env if no db session
stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
try:
# Create Checkout Session
checkout_session = stripe.checkout.Session.create(
@@ -98,14 +74,13 @@ def create_checkout_session(
raise Exception(f"Stripe error: {str(e)}")
def verify_webhook_signature(payload: bytes, sig_header: str, db=None) -> dict:
def verify_webhook_signature(payload: bytes, sig_header: str) -> dict:
"""
Verify Stripe webhook signature and construct event.
Args:
payload: Raw webhook payload bytes
sig_header: Stripe signature header
db: Database session (optional, for reading webhook secret from database)
Returns:
dict: Verified webhook event
@@ -113,32 +88,9 @@ def verify_webhook_signature(payload: bytes, sig_header: str, db=None) -> dict:
Raises:
ValueError: If signature verification fails
"""
# Load webhook secret from database if available
webhook_secret = None
if db:
try:
from models import SystemSettings
from encryption_service import get_encryption_service
setting = db.query(SystemSettings).filter(
SystemSettings.setting_key == 'stripe_webhook_secret'
).first()
if setting and setting.setting_value:
encryption_service = get_encryption_service()
webhook_secret = encryption_service.decrypt(setting.setting_value)
except Exception as e:
print(f"Failed to read webhook secret from database: {e}")
webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
else:
webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
if not webhook_secret:
raise ValueError("STRIPE_WEBHOOK_SECRET not configured")
try:
event = stripe.Webhook.construct_event(
payload, sig_header, webhook_secret
payload, sig_header, STRIPE_WEBHOOK_SECRET
)
return event
except ValueError as e:

View File

@@ -31,7 +31,7 @@ motor==3.3.1
msal==1.27.0
mypy==1.18.2
mypy_extensions==1.1.0
numpy==2.2.6
numpy==2.3.5
oauthlib==3.3.1
packaging==25.0
pandas==2.3.3

server.py
View File

@@ -60,29 +60,11 @@ async def lifespan(app: FastAPI):
# Shutdown
logger.info("Application shutdown")
# Environment detection
ENVIRONMENT = os.environ.get('ENVIRONMENT', 'development')
IS_PRODUCTION = ENVIRONMENT == 'production'
# Security: Disable API documentation in production
if IS_PRODUCTION:
print("🔒 Production mode: API documentation disabled")
app_config = {
"lifespan": lifespan,
"root_path": "/membership",
"docs_url": None, # Disable /docs
"redoc_url": None, # Disable /redoc
"openapi_url": None # Disable /openapi.json
}
else:
print("🔓 Development mode: API documentation enabled at /docs and /redoc")
app_config = {
"lifespan": lifespan,
"root_path": "/membership"
}
# Create the main app
app = FastAPI(**app_config)
app = FastAPI(
lifespan=lifespan,
root_path="/membership" # Configure for serving under /membership path
)
# Create a router with the /api prefix
api_router = APIRouter(prefix="/api")
@@ -382,9 +364,6 @@ class AttendanceUpdate(BaseModel):
user_id: str
attended: bool
class BatchAttendanceUpdate(BaseModel):
updates: list[AttendanceUpdate]
class UpdateUserStatusRequest(BaseModel):
status: str
@@ -514,10 +493,6 @@ class AcceptInvitationRequest(BaseModel):
zipcode: Optional[str] = None
date_of_birth: Optional[datetime] = None
class ChangeRoleRequest(BaseModel):
role: str
role_id: Optional[str] = None # For custom roles
# Auth Routes
@api_router.post("/auth/register")
async def register(request: RegisterRequest, db: Session = Depends(get_db)):
@@ -812,53 +787,6 @@ async def get_config():
"max_file_size_mb": int(max_file_size_mb)
}
@api_router.get("/diagnostics/cors")
async def cors_diagnostics(request: Request):
"""
CORS Diagnostics Endpoint
Shows current CORS configuration and request details for debugging
Use this to verify:
1. What origins are allowed
2. What origin is making the request
3. Whether CORS is properly configured
"""
cors_origins_env = os.environ.get('CORS_ORIGINS', '')
if cors_origins_env:
configured_origins = [origin.strip() for origin in cors_origins_env.split(',')]
cors_status = "✅ CONFIGURED"
else:
configured_origins = [
"http://localhost:3000",
"http://localhost:8000",
"http://127.0.0.1:3000",
"http://127.0.0.1:8000"
]
cors_status = "⚠️ NOT CONFIGURED (using defaults)"
request_origin = request.headers.get('origin', 'None')
origin_allowed = request_origin in configured_origins
return {
"cors_status": cors_status,
"environment": ENVIRONMENT,
"cors_origins_env_variable": cors_origins_env or "(not set)",
"allowed_origins": configured_origins,
"request_origin": request_origin,
"origin_allowed": origin_allowed,
"diagnosis": {
"cors_configured": bool(cors_origins_env),
"origin_matches": origin_allowed,
"issue": None if origin_allowed else f"Origin '{request_origin}' is not in allowed origins list"
},
"fix_instructions": None if origin_allowed else (
f"Add to backend .env file:\n"
f"CORS_ORIGINS={request_origin}"
f"{(',' + ','.join(configured_origins)) if cors_origins_env else ''}"
)
}
# User Profile Routes
@api_router.get("/users/profile", response_model=UserResponse)
async def get_profile(current_user: User = Depends(get_current_user)):
@@ -1571,14 +1499,7 @@ async def get_events(
EventRSVP.rsvp_status == RSVPStatus.yes
).count()
# Get current user's RSVP status for this event
user_rsvp = db.query(EventRSVP).filter(
EventRSVP.event_id == event.id,
EventRSVP.user_id == current_user.id
).first()
user_rsvp_status = user_rsvp.rsvp_status.value if user_rsvp else None
# No user_rsvp_status in public endpoint
result.append(EventResponse(
id=str(event.id),
title=event.title,
@@ -1591,7 +1512,7 @@ async def get_events(
created_by=str(event.created_by),
created_at=event.created_at,
rsvp_count=rsvp_count,
user_rsvp_status=user_rsvp_status
user_rsvp_status=None
))
return result
@@ -1611,13 +1532,8 @@ async def get_event(
EventRSVP.rsvp_status == RSVPStatus.yes
).count()
# Get current user's RSVP status for this event
user_rsvp = db.query(EventRSVP).filter(
EventRSVP.event_id == event_id,
EventRSVP.user_id == current_user.id
).first()
user_rsvp_status = user_rsvp.rsvp_status.value if user_rsvp else None
# No user_rsvp_status in public endpoint
user_rsvp = None
return EventResponse(
id=str(event.id),
@@ -1631,7 +1547,7 @@ async def get_event(
created_by=str(event.created_by),
created_at=event.created_at,
rsvp_count=rsvp_count,
user_rsvp_status=user_rsvp_status
user_rsvp_status=user_rsvp
)
@api_router.post("/events/{event_id}/rsvp")
@@ -1702,9 +1618,7 @@ async def get_my_event_activity(
}
# Separate upcoming vs past events
# Ensure timezone-aware comparison
event_end_at = event.end_at.replace(tzinfo=timezone.utc) if event.end_at.tzinfo is None else event.end_at
if event_end_at > now:
if event.end_at > now:
upcoming_events.append(event_data)
else:
past_events.append(event_data)
@@ -2531,102 +2445,6 @@ async def admin_reset_user_password(
return {"message": f"Password reset for {user.email}. Temporary password emailed."}
@api_router.put("/admin/users/{user_id}/role")
async def change_user_role(
user_id: str,
request: ChangeRoleRequest,
current_user: User = Depends(require_permission("users.edit")),
db: Session = Depends(get_db)
):
"""
Change an existing user's role with privilege escalation prevention.
Requires: users.edit permission
Rules:
- Superadmin: Can assign any role (including superadmin)
- Admin: Can assign admin, finance, member, guest, and non-elevated custom roles
- Admin CANNOT assign: superadmin or custom roles with elevated permissions
- Users CANNOT change their own role
"""
# 1. Fetch target user
target_user = db.query(User).filter(User.id == user_id).first()
if not target_user:
raise HTTPException(status_code=404, detail="User not found")
# 2. Prevent self-role-change
if str(target_user.id) == str(current_user.id):
raise HTTPException(
status_code=403,
detail="You cannot change your own role"
)
# 3. Validate new role
if request.role not in ['guest', 'member', 'admin', 'finance', 'superadmin']:
raise HTTPException(status_code=400, detail="Invalid role")
# 4. Privilege escalation check
if current_user.role != 'superadmin':
# Non-superadmin cannot assign superadmin role
if request.role == 'superadmin':
raise HTTPException(
status_code=403,
detail="Only superadmin can assign superadmin role"
)
# Check custom role elevation
if request.role_id:
custom_role = db.query(Role).filter(Role.id == request.role_id).first()
if not custom_role:
raise HTTPException(status_code=404, detail="Custom role not found")
# Check if custom role has elevated permissions
elevated_permissions = ['users.delete', 'roles.create', 'roles.edit',
'roles.delete', 'permissions.edit']
role_perms = db.query(Permission.name).join(RolePermission).filter(
RolePermission.role_id == custom_role.id,
Permission.name.in_(elevated_permissions)
).all()
if role_perms:
raise HTTPException(
status_code=403,
detail=f"Cannot assign role with elevated permissions: {custom_role.name}"
)
# 5. Update role with audit trail
old_role = target_user.role
old_role_id = target_user.role_id
target_user.role = request.role
target_user.role_id = request.role_id if request.role_id else None
target_user.role_changed_at = datetime.now(timezone.utc)
target_user.role_changed_by = current_user.id
target_user.updated_at = datetime.now(timezone.utc)
db.commit()
db.refresh(target_user)
# Log admin action
logger.info(
f"Admin {current_user.email} changed role for user {target_user.email} "
f"from {old_role} to {request.role}"
)
return {
"message": f"Role changed from {old_role} to {request.role}",
"user": {
"id": str(target_user.id),
"email": target_user.email,
"name": f"{target_user.first_name} {target_user.last_name}",
"old_role": old_role,
"new_role": target_user.role,
"changed_by": f"{current_user.first_name} {current_user.last_name}",
"changed_at": target_user.role_changed_at.isoformat()
}
}
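# Example request (illustrative placeholder values; path reflects the /membership
# root_path and /api prefix configured above):
#   PUT /membership/api/admin/users/<user_id>/role
#   {"role": "finance"}    # or {"role": "member", "role_id": "<custom-role-uuid>"}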
@api_router.post("/admin/users/{user_id}/resend-verification")
async def admin_resend_verification(
user_id: str,
@@ -2996,9 +2814,8 @@ async def verify_invitation_token(
if not invitation:
raise HTTPException(status_code=404, detail="Invalid or expired invitation token")
# Check expiry (handle timezone-naive datetime from DB)
expires_at_aware = invitation.expires_at.replace(tzinfo=timezone.utc) if invitation.expires_at.tzinfo is None else invitation.expires_at
if expires_at_aware < datetime.now(timezone.utc):
# Check expiry
if invitation.expires_at < datetime.now(timezone.utc):
invitation.status = InvitationStatus.expired
db.commit()
raise HTTPException(status_code=400, detail="Invitation has expired")
@@ -3030,9 +2847,8 @@ async def accept_invitation(
if not invitation:
raise HTTPException(status_code=404, detail="Invalid or expired invitation token")
# Check expiry (handle timezone-naive datetime from DB)
expires_at_aware = invitation.expires_at.replace(tzinfo=timezone.utc) if invitation.expires_at.tzinfo is None else invitation.expires_at
if expires_at_aware < datetime.now(timezone.utc):
# Check expiry
if invitation.expires_at < datetime.now(timezone.utc):
invitation.status = InvitationStatus.expired
db.commit()
raise HTTPException(status_code=400, detail="Invitation has expired")
@@ -3978,37 +3794,6 @@ async def update_event(
return {"message": "Event updated successfully"}
@api_router.get("/admin/events/{event_id}", response_model=EventResponse)
async def get_admin_event(
event_id: str,
current_user: User = Depends(require_permission("events.view")),
db: Session = Depends(get_db)
):
"""Get single event details (admin) - allows viewing unpublished events"""
event = db.query(Event).filter(Event.id == event_id).first()
if not event:
raise HTTPException(status_code=404, detail="Event not found")
rsvp_count = db.query(EventRSVP).filter(
EventRSVP.event_id == event.id,
EventRSVP.rsvp_status == RSVPStatus.yes
).count()
return EventResponse(
id=str(event.id),
title=event.title,
description=event.description,
start_at=event.start_at,
end_at=event.end_at,
location=event.location,
capacity=event.capacity,
published=event.published,
created_by=str(event.created_by),
created_at=event.created_at,
rsvp_count=rsvp_count,
user_rsvp_status=None
)
@api_router.get("/admin/events/{event_id}/rsvps")
async def get_event_rsvps(
event_id: str,
@@ -4039,53 +3824,46 @@ async def get_event_rsvps(
@api_router.put("/admin/events/{event_id}/attendance")
async def mark_attendance(
event_id: str,
request: BatchAttendanceUpdate,
request: AttendanceUpdate,
current_user: User = Depends(require_permission("events.attendance")),
db: Session = Depends(get_db)
):
"""Mark attendance for one or more users (supports batch updates)"""
event = db.query(Event).filter(Event.id == event_id).first()
if not event:
raise HTTPException(status_code=404, detail="Event not found")
updated_count = 0
rsvp = db.query(EventRSVP).filter(
EventRSVP.event_id == event_id,
EventRSVP.user_id == request.user_id
).first()
# Process each update in the batch
for update in request.updates:
rsvp = db.query(EventRSVP).filter(
EventRSVP.event_id == event_id,
EventRSVP.user_id == update.user_id
).first()
# Auto-create RSVP if it doesn't exist (for retroactive attendance marking)
if not rsvp:
rsvp = EventRSVP(
event_id=event_id,
user_id=request.user_id,
rsvp_status=RSVPStatus.yes, # Default to 'yes' for attended events
attended=False,
created_at=datetime.now(timezone.utc),
updated_at=datetime.now(timezone.utc)
)
db.add(rsvp)
db.flush() # Get the ID without committing
# Auto-create RSVP if it doesn't exist (for retroactive attendance marking)
if not rsvp:
rsvp = EventRSVP(
event_id=event_id,
user_id=update.user_id,
rsvp_status=RSVPStatus.yes, # Default to 'yes' for attended events
attended=False,
created_at=datetime.now(timezone.utc),
updated_at=datetime.now(timezone.utc)
)
db.add(rsvp)
db.flush() # Get the ID without committing
rsvp.attended = request.attended
rsvp.attended_at = datetime.now(timezone.utc) if request.attended else None
rsvp.updated_at = datetime.now(timezone.utc)
rsvp.attended = update.attended
rsvp.attended_at = datetime.now(timezone.utc) if update.attended else None
rsvp.updated_at = datetime.now(timezone.utc)
# If user attended and they were pending validation, update their status
if update.attended:
user = db.query(User).filter(User.id == update.user_id).first()
if user and user.status == UserStatus.pending_validation:
user.status = UserStatus.pre_validated
user.updated_at = datetime.now(timezone.utc)
updated_count += 1
# If user attended and they were pending validation, update their status
if request.attended:
user = db.query(User).filter(User.id == request.user_id).first()
if user and user.status == UserStatus.pending_validation:
user.status = UserStatus.pre_validated
user.updated_at = datetime.now(timezone.utc)
db.commit()
return {"message": f"Attendance marked successfully for {updated_count} {'person' if updated_count == 1 else 'people'}"}
return {"message": "Attendance marked successfully"}
@api_router.get("/admin/events")
async def get_admin_events(
@@ -5407,101 +5185,6 @@ async def get_all_roles(
for role, count in roles_with_counts
]
@api_router.get("/admin/roles/assignable", response_model=List[RoleResponse])
async def get_assignable_roles(
current_user: User = Depends(require_permission("users.create")),
db: Session = Depends(get_db)
):
"""
Get roles that the current user can assign when inviting staff
- Superadmin: Can assign all roles
- Admin: Can assign admin, finance, and non-elevated custom roles
- Returns roles filtered by user's permission level
"""
from sqlalchemy import func
# Query all roles with permission counts
roles_query = db.query(
Role,
func.count(RolePermission.id).label('permission_count')
).outerjoin(RolePermission, Role.id == RolePermission.role_id)\
.group_by(Role.id)\
.order_by(Role.is_system_role.desc(), Role.name)
all_roles = roles_query.all()
# Superadmin can assign any role
if current_user.role == UserRole.superadmin:
return [
{
"id": str(role.id),
"code": role.code,
"name": role.name,
"description": role.description,
"is_system_role": role.is_system_role,
"created_at": role.created_at,
"updated_at": role.updated_at,
"permission_count": count
}
for role, count in all_roles
]
# Admin users can assign: admin, finance, and non-elevated custom roles
# Get admin role's permissions to check for elevation
admin_role = db.query(Role).filter(Role.code == "admin").first()
admin_permission_codes = set()
if admin_role:
admin_permissions = db.query(RolePermission).filter(
RolePermission.role_id == admin_role.id
).all()
admin_permission_codes = {rp.permission_id for rp in admin_permissions}
assignable_roles = []
for role, count in all_roles:
# Always exclude superadmin role
if role.code == "superadmin":
continue
# Include system roles: admin and finance
if role.is_system_role and role.code in ["admin", "finance"]:
assignable_roles.append({
"id": str(role.id),
"code": role.code,
"name": role.name,
"description": role.description,
"is_system_role": role.is_system_role,
"created_at": role.created_at,
"updated_at": role.updated_at,
"permission_count": count
})
continue
# For custom roles, check if they're elevated
if not role.is_system_role:
role_permissions = db.query(RolePermission).filter(
RolePermission.role_id == role.id
).all()
role_permission_ids = {rp.permission_id for rp in role_permissions}
# Check if custom role has permissions admin doesn't have (elevated)
has_elevated_permissions = bool(role_permission_ids - admin_permission_codes)
# Only include non-elevated custom roles
if not has_elevated_permissions:
assignable_roles.append({
"id": str(role.id),
"code": role.code,
"name": role.name,
"description": role.description,
"is_system_role": role.is_system_role,
"created_at": role.created_at,
"updated_at": role.updated_at,
"permission_count": count
})
return assignable_roles
@api_router.post("/admin/roles", response_model=RoleResponse)
async def create_role(
request: CreateRoleRequest,
@@ -6297,8 +5980,8 @@ async def stripe_webhook(request: Request, db: Session = Depends(get_db)):
raise HTTPException(status_code=400, detail="Missing stripe-signature header")
try:
# Verify webhook signature (pass db for reading webhook secret from database)
event = verify_webhook_signature(payload, sig_header, db)
# Verify webhook signature
event = verify_webhook_signature(payload, sig_header)
except ValueError as e:
logger.error(f"Webhook signature verification failed: {str(e)}")
raise HTTPException(status_code=400, detail=str(e))
@@ -6398,351 +6081,13 @@ async def stripe_webhook(request: Request, db: Session = Depends(get_db)):
return {"status": "success"}
# ============================================================================
# ADMIN SETTINGS ENDPOINTS
# ============================================================================
# Helper functions for system settings
def get_setting(db: Session, key: str, decrypt: bool = False) -> str | None:
"""
Get a system setting value from the database.
Args:
db: Database session
key: Setting key to retrieve
decrypt: If True and setting_type is 'encrypted', decrypt the value
Returns:
Setting value or None if not found
"""
from models import SystemSettings, SettingType
from encryption_service import get_encryption_service
setting = db.query(SystemSettings).filter(SystemSettings.setting_key == key).first()
if not setting:
return None
value = setting.setting_value
if decrypt and setting.setting_type == SettingType.encrypted and value:
try:
encryption_service = get_encryption_service()
value = encryption_service.decrypt(value)
except Exception as e:
print(f"Failed to decrypt setting {key}: {e}")
return None
return value
def set_setting(
db: Session,
key: str,
value: str,
user_id: str,
setting_type: str = "plaintext",
description: str = None,
is_sensitive: bool = False,
encrypt: bool = False
) -> None:
"""
Set a system setting value in the database.
Args:
db: Database session
key: Setting key
value: Setting value
user_id: ID of user making the change
setting_type: Type of setting (plaintext, encrypted, json)
description: Human-readable description
is_sensitive: Whether this is sensitive data
encrypt: If True, encrypt the value before storing
"""
from models import SystemSettings, SettingType
from encryption_service import get_encryption_service
# Encrypt value if requested
if encrypt and value:
encryption_service = get_encryption_service()
value = encryption_service.encrypt(value)
setting_type = "encrypted"
# Find or create setting
setting = db.query(SystemSettings).filter(SystemSettings.setting_key == key).first()
if setting:
# Update existing
setting.setting_value = value
setting.setting_type = SettingType[setting_type]
setting.updated_by = user_id
setting.updated_at = datetime.now(timezone.utc)
if description:
setting.description = description
setting.is_sensitive = is_sensitive
else:
# Create new
setting = SystemSettings(
setting_key=key,
setting_value=value,
setting_type=SettingType[setting_type],
description=description,
updated_by=user_id,
is_sensitive=is_sensitive
)
db.add(setting)
db.commit()
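# Usage sketch (illustrative values; `db` is a SQLAlchemy Session, `admin_id` a user UUID string):
#
#   set_setting(db, key='stripe_secret_key', value='sk_test_abc123',
#               user_id=admin_id, is_sensitive=True, encrypt=True)
#   get_setting(db, 'stripe_secret_key', decrypt=True)   # -> 'sk_test_abc123'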
@api_router.get("/admin/settings/stripe/status")
async def get_stripe_status(
current_user: User = Depends(get_current_superadmin),
db: Session = Depends(get_db)
):
"""
Get Stripe integration status (superadmin only).
Returns:
- configured: Whether credentials exist in database
- secret_key_prefix: First 10 chars of secret key (for verification)
- webhook_configured: Whether webhook secret exists
- environment: test or live (based on key prefix)
- webhook_url: Full webhook URL for Stripe configuration
"""
import os
# Read from database
secret_key = get_setting(db, 'stripe_secret_key', decrypt=True)
webhook_secret = get_setting(db, 'stripe_webhook_secret', decrypt=True)
configured = bool(secret_key)
environment = 'unknown'
if secret_key:
if secret_key.startswith('sk_test_'):
environment = 'test'
elif secret_key.startswith('sk_live_'):
environment = 'live'
# Get backend URL from environment for webhook URL
# Try multiple environment variable patterns for flexibility
backend_url = (
os.environ.get('BACKEND_URL') or
os.environ.get('API_URL') or
f"http://{os.environ.get('HOST', 'localhost')}:{os.environ.get('PORT', '8000')}"
)
webhook_url = f"{backend_url}/api/webhooks/stripe"
return {
"configured": configured,
"secret_key_prefix": secret_key[:10] if secret_key else None,
"secret_key_set": bool(secret_key),
"webhook_secret_set": bool(webhook_secret),
"environment": environment,
"webhook_url": webhook_url,
"instructions": {
"location": "Database (system_settings table)",
"required_settings": [
"stripe_secret_key (sk_test_... or sk_live_...)",
"stripe_webhook_secret (whsec_...)"
],
"restart_required": "No - changes take effect immediately"
}
}
@api_router.post("/admin/settings/stripe/test-connection")
async def test_stripe_connection(
current_user: User = Depends(get_current_superadmin),
db: Session = Depends(get_db)
):
"""
Test Stripe API connection (superadmin only).
Performs a simple API call to verify credentials work.
"""
import stripe
# Read from database
secret_key = get_setting(db, 'stripe_secret_key', decrypt=True)
if not secret_key:
raise HTTPException(
status_code=400,
detail="STRIPE_SECRET_KEY not configured in database. Please configure Stripe settings first."
)
try:
stripe.api_key = secret_key
# Make a simple API call to test connection
balance = stripe.Balance.retrieve()
return {
"success": True,
"message": "Stripe connection successful",
"environment": "test" if secret_key.startswith('sk_test_') else "live",
"balance": {
"available": balance.available,
"pending": balance.pending
}
}
except stripe.error.AuthenticationError as e:
raise HTTPException(
status_code=401,
detail=f"Stripe authentication failed: {str(e)}"
)
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"Stripe connection test failed: {str(e)}"
)
class UpdateStripeSettingsRequest(BaseModel):
"""Request model for updating Stripe settings"""
secret_key: str = Field(..., min_length=1, description="Stripe secret key (sk_test_... or sk_live_...)")
webhook_secret: str = Field(..., min_length=1, description="Stripe webhook secret (whsec_...)")
@api_router.put("/admin/settings/stripe")
async def update_stripe_settings(
request: UpdateStripeSettingsRequest,
current_user: User = Depends(get_current_superadmin),
db: Session = Depends(get_db)
):
"""
Update Stripe integration settings (superadmin only).
Stores Stripe credentials encrypted in the database.
Changes take effect immediately without server restart.
"""
# Validate secret key format
if not (request.secret_key.startswith('sk_test_') or request.secret_key.startswith('sk_live_')):
raise HTTPException(
status_code=400,
detail="Invalid Stripe secret key format. Must start with 'sk_test_' or 'sk_live_'"
)
# Validate webhook secret format
if not request.webhook_secret.startswith('whsec_'):
raise HTTPException(
status_code=400,
detail="Invalid Stripe webhook secret format. Must start with 'whsec_'"
)
try:
# Store secret key (encrypted)
set_setting(
db=db,
key='stripe_secret_key',
value=request.secret_key,
user_id=str(current_user.id),
description='Stripe API secret key for payment processing',
is_sensitive=True,
encrypt=True
)
# Store webhook secret (encrypted)
set_setting(
db=db,
key='stripe_webhook_secret',
value=request.webhook_secret,
user_id=str(current_user.id),
description='Stripe webhook secret for verifying webhook signatures',
is_sensitive=True,
encrypt=True
)
# Determine environment
environment = 'test' if request.secret_key.startswith('sk_test_') else 'live'
return {
"success": True,
"message": "Stripe settings updated successfully",
"environment": environment,
"updated_at": datetime.now(timezone.utc).isoformat(),
"updated_by": f"{current_user.first_name} {current_user.last_name}"
}
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"Failed to update Stripe settings: {str(e)}"
)
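# Example request (superadmin only; illustrative placeholder values):
#   PUT /membership/api/admin/settings/stripe
#   {"secret_key": "sk_test_...", "webhook_secret": "whsec_..."}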
# Include the router in the main app
app.include_router(api_router)
# ============================================================================
# MIDDLEWARE CONFIGURATION
# ============================================================================
# IMPORTANT: In FastAPI, middleware is executed in REVERSE order of addition
# Last added = First executed
# So we add them in this order: Security Headers -> CORS
# Execution order will be: CORS -> Security Headers
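# e.g. app.add_middleware(A); app.add_middleware(B) means a request passes through B
# first, then A; hence CORS (added last, below) runs before the security headers.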
# Security Headers Middleware (Added first, executes second)
@app.middleware("http")
async def add_security_headers(request: Request, call_next):
response = await call_next(request)
# Security headers to protect against common vulnerabilities
security_headers = {
# Prevent clickjacking attacks
"X-Frame-Options": "DENY",
# Prevent MIME type sniffing
"X-Content-Type-Options": "nosniff",
# Enable XSS protection in older browsers
"X-XSS-Protection": "1; mode=block",
# Control referrer information
"Referrer-Policy": "strict-origin-when-cross-origin",
# Permissions policy (formerly Feature-Policy)
"Permissions-Policy": "geolocation=(), microphone=(), camera=()",
}
# Add HSTS header in production (force HTTPS)
if IS_PRODUCTION:
security_headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains"
# Apply all security headers
for header, value in security_headers.items():
response.headers[header] = value
# Remove server identification headers (use del, not pop for MutableHeaders)
if "Server" in response.headers:
del response.headers["Server"]
return response
print(f"✓ Security headers configured (Production: {IS_PRODUCTION})")
# CORS Configuration (Added second, executes first)
cors_origins = os.environ.get('CORS_ORIGINS', '')
if cors_origins:
# Use explicitly configured origins
allowed_origins = [origin.strip() for origin in cors_origins.split(',')]
else:
# Default to common development origins if not configured
allowed_origins = [
"http://localhost:3000",
"http://localhost:8000",
"http://127.0.0.1:3000",
"http://127.0.0.1:8000"
]
print(f"⚠️ WARNING: CORS_ORIGINS not set. Using defaults: {allowed_origins}")
print("⚠️ For production, set CORS_ORIGINS in .env file!")
print(f"✓ CORS allowed origins: {allowed_origins}")
app.add_middleware(
CORSMiddleware,
allow_credentials=True,
allow_origins=allowed_origins,
allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS", "PATCH"],
allow_origins=os.environ.get('CORS_ORIGINS', '*').split(','),
allow_methods=["*"],
allow_headers=["*"],
expose_headers=["*"],
max_age=600, # Cache preflight requests for 10 minutes
)

View File

@@ -1,10 +0,0 @@
#!/usr/bin/env bash
# Exit immediately if a command fails
set -e
# Activate virtual environment
source .venv/bin/activate
# Start the backend
python -m uvicorn server:app --reload --port 8000