Compare commits: a01a8b9915...docker (38 commits)
| SHA1 |
|---|
| 5ab0038c0a |
| e938baa78e |
| 39324ba6f6 |
| adbfa7a3c8 |
| a74f161efa |
| d818d847bc |
| 1390e07500 |
| 38e5f5377a |
| e06f18ce17 |
| 810366d00f |
| 84285861cc |
| 56d1b97261 |
| 314380eec6 |
| 2b82f4acd8 |
| cca694766b |
| 0171546bba |
| 96aca7d39b |
| 82319509c3 |
| 2547758864 |
| 669d78beb5 |
| df789612e8 |
| 9266521bf1 |
| c8f4040244 |
| c848d4240a |
| 6784148058 |
| 340f838925 |
| d967d1934e |
| 03ae921a5f |
| d5f2373143 |
| 7ad5bfb1e5 |
| 6b6173bd5b |
| cf8d38a4a4 |
| 09712e52bb |
| 366245acc7 |
| a75bf743f4 |
| fb369977d0 |
| 1ed9aa0994 |
| 04783f66f1 |
.env.example (13 changes)
@@ -6,6 +6,10 @@ JWT_SECRET=your-secret-key-change-this-in-production
 JWT_ALGORITHM=HS256
 ACCESS_TOKEN_EXPIRE_MINUTES=30
 
+# Settings Encryption (for database-stored sensitive settings)
+# Generate with: python -c "import secrets; print(secrets.token_urlsafe(64))"
+SETTINGS_ENCRYPTION_KEY=your-encryption-key-generate-with-command-above
+
 # SMTP Email Configuration (Port 465 - SSL/TLS)
 SMTP_HOST=p.konceptkit.com
 SMTP_PORT=465
@@ -28,7 +32,14 @@ SMTP_FROM_NAME=LOAF Membership
 # Frontend URL
 FRONTEND_URL=http://localhost:3000
 
-# Stripe Configuration (for future payment integration)
+# Backend URL (for webhook URLs and API references)
+# Used to construct Stripe webhook URL shown in Admin Settings
+BACKEND_URL=http://localhost:8000
+
+# Stripe Configuration (NOW DATABASE-DRIVEN via Admin Settings page)
+# Configure Stripe credentials through the Admin Settings UI (requires SETTINGS_ENCRYPTION_KEY)
+# No longer requires .env variables - managed through database for dynamic updates
+# Legacy .env variables below are deprecated:
 # STRIPE_SECRET_KEY=sk_test_...
 # STRIPE_WEBHOOK_SECRET=whsec_...
 
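The diff only introduces the `SETTINGS_ENCRYPTION_KEY` variable; the code that consumes it is not shown in this comparison. A minimal sketch of how such a free-form secret could drive symmetric encryption of database-stored settings, assuming a Fernet-based scheme (the helper name and key-derivation step are illustrative, not the app's confirmed implementation):

```python
# Hypothetical sketch, not from this diff: encrypt/decrypt settings values
# with a key derived from SETTINGS_ENCRYPTION_KEY.
import base64
import hashlib
import os

from cryptography.fernet import Fernet  # third-party: pip install cryptography


def _fernet_from_secret(secret: str) -> Fernet:
    # Fernet requires a 32-byte urlsafe-base64 key; derive one from the
    # free-form secret produced by secrets.token_urlsafe(64).
    digest = hashlib.sha256(secret.encode()).digest()
    return Fernet(base64.urlsafe_b64encode(digest))


fernet = _fernet_from_secret(os.environ["SETTINGS_ENCRYPTION_KEY"])
token = fernet.encrypt(b"sk_test_example")  # ciphertext stored in the settings table
plain = fernet.decrypt(token)               # decrypted on read
```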
.gitignore (vendored, 3 changes)
@@ -1,3 +1,5 @@
+.env
+.venv
 # ============================================================================
 # Python Backend .gitignore
 # For FastAPI + PostgreSQL + Cloudflare R2 + Stripe
@@ -8,6 +10,7 @@
 .env.*
 !.env.example
 .envrc
+.sh
 
 # ===== Python =====
 # Byte-compiled / optimized / DLL files
Dockerfile (new file, 20 lines)
@@ -0,0 +1,20 @@
# Use an official Python image (Linux)
FROM python:3.12-slim

# Set a working directory
WORKDIR /app

# Copy dependency list
COPY requirements.txt .

# Install dependencies
RUN pip3 install --no-cache-dir -r requirements.txt

# Copy the rest of the project
COPY . .

# Expose port (whatever your backend runs on)
EXPOSE 8000

# Run exactly your command
CMD ["python", "-m", "uvicorn", "server:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
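For local testing this image would typically be built and run with `docker build -t loaf-backend .` followed by `docker run --env-file .env -p 8000:8000 loaf-backend` (the image name is assumed here). Note that the CMD keeps uvicorn's `--reload` flag, a development convenience; a production image would normally drop it.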
BIN __pycache__/auth.cpython-310.pyc (new file, binary not shown)
BIN __pycache__/calendar_service.cpython-310.pyc (new file, binary not shown)
BIN __pycache__/database.cpython-310.pyc (new file, binary not shown)
BIN (binary file not shown)
BIN __pycache__/email_service.cpython-310.pyc (new file, binary not shown)
BIN __pycache__/models.cpython-310.pyc (new file, binary not shown)
BIN __pycache__/payment_service.cpython-310.pyc (new file, binary not shown)
BIN __pycache__/r2_storage.cpython-310.pyc (new file, binary not shown)
BIN __pycache__/server.cpython-310.pyc (new file, binary not shown)
BIN (binary file not shown)
BIN __pycache__/wordpress_parser.cpython-310.pyc (new file, binary not shown)
@@ -24,31 +24,48 @@ depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
-    """Add missing user fields"""
+    """Add missing user fields (skip if already exists)"""
+    from sqlalchemy import inspect
+
+    conn = op.get_bind()
+    inspector = inspect(conn)
+    existing_columns = {col['name'] for col in inspector.get_columns('users')}
 
     # Add scholarship_reason
-    op.add_column('users', sa.Column('scholarship_reason', sa.Text(), nullable=True))
+    if 'scholarship_reason' not in existing_columns:
+        op.add_column('users', sa.Column('scholarship_reason', sa.Text(), nullable=True))
 
     # Add directory fields
-    op.add_column('users', sa.Column('directory_email', sa.String(), nullable=True))
-    op.add_column('users', sa.Column('directory_bio', sa.Text(), nullable=True))
-    op.add_column('users', sa.Column('directory_address', sa.String(), nullable=True))
-    op.add_column('users', sa.Column('directory_phone', sa.String(), nullable=True))
-    op.add_column('users', sa.Column('directory_dob', sa.DateTime(), nullable=True))
-    op.add_column('users', sa.Column('directory_partner_name', sa.String(), nullable=True))
+    if 'directory_email' not in existing_columns:
+        op.add_column('users', sa.Column('directory_email', sa.String(), nullable=True))
+    if 'directory_bio' not in existing_columns:
+        op.add_column('users', sa.Column('directory_bio', sa.Text(), nullable=True))
+    if 'directory_address' not in existing_columns:
+        op.add_column('users', sa.Column('directory_address', sa.String(), nullable=True))
+    if 'directory_phone' not in existing_columns:
+        op.add_column('users', sa.Column('directory_phone', sa.String(), nullable=True))
+    if 'directory_dob' not in existing_columns:
+        op.add_column('users', sa.Column('directory_dob', sa.DateTime(), nullable=True))
+    if 'directory_partner_name' not in existing_columns:
+        op.add_column('users', sa.Column('directory_partner_name', sa.String(), nullable=True))
 
-    # Rename profile_image_url to profile_photo_url (for consistency with models.py)
-    op.alter_column('users', 'profile_image_url', new_column_name='profile_photo_url')
+    # Rename profile_image_url to profile_photo_url (skip if already renamed)
+    if 'profile_image_url' in existing_columns and 'profile_photo_url' not in existing_columns:
+        op.alter_column('users', 'profile_image_url', new_column_name='profile_photo_url')
 
     # Add social media fields
-    op.add_column('users', sa.Column('social_media_facebook', sa.String(), nullable=True))
-    op.add_column('users', sa.Column('social_media_instagram', sa.String(), nullable=True))
-    op.add_column('users', sa.Column('social_media_twitter', sa.String(), nullable=True))
-    op.add_column('users', sa.Column('social_media_linkedin', sa.String(), nullable=True))
+    if 'social_media_facebook' not in existing_columns:
+        op.add_column('users', sa.Column('social_media_facebook', sa.String(), nullable=True))
+    if 'social_media_instagram' not in existing_columns:
+        op.add_column('users', sa.Column('social_media_instagram', sa.String(), nullable=True))
+    if 'social_media_twitter' not in existing_columns:
+        op.add_column('users', sa.Column('social_media_twitter', sa.String(), nullable=True))
+    if 'social_media_linkedin' not in existing_columns:
+        op.add_column('users', sa.Column('social_media_linkedin', sa.String(), nullable=True))
 
-    # Add email_verification_expires (exists in DB but not in models.py initially)
-    # Check if it already exists, if not add it
-    # This field should already exist from the initial schema, but adding for completeness
+    # Add email_verification_expires if missing
+    if 'email_verification_expires' not in existing_columns:
+        op.add_column('users', sa.Column('email_verification_expires', sa.DateTime(), nullable=True))
 
 
 def downgrade() -> None:
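This column-guard pattern repeats through the next several migrations. A small helper could condense it; a sketch under the same imports, where the helper name is hypothetical rather than something in the diff:

```python
# Hypothetical helper (not in the diff): add a column only when the live
# table lacks it, mirroring the guard pattern used in these migrations.
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect


def add_column_if_missing(table: str, column: sa.Column) -> None:
    inspector = inspect(op.get_bind())
    existing = {col['name'] for col in inspector.get_columns(table)}
    if column.name not in existing:
        op.add_column(table, column)


# Usage inside an upgrade():
#   add_column_if_missing('users', sa.Column('directory_email', sa.String(), nullable=True))
```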
@@ -22,11 +22,24 @@ depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
-    """Add optional pre-filled information fields to user_invitations"""
+    """Add optional pre-filled information fields to user_invitations (skip if already exists)"""
+    from sqlalchemy import inspect
 
-    op.add_column('user_invitations', sa.Column('first_name', sa.String(), nullable=True))
-    op.add_column('user_invitations', sa.Column('last_name', sa.String(), nullable=True))
-    op.add_column('user_invitations', sa.Column('phone', sa.String(), nullable=True))
+    conn = op.get_bind()
+    inspector = inspect(conn)
+    existing_columns = {col['name'] for col in inspector.get_columns('user_invitations')}
+
+    # Add first_name if missing
+    if 'first_name' not in existing_columns:
+        op.add_column('user_invitations', sa.Column('first_name', sa.String(), nullable=True))
+
+    # Add last_name if missing
+    if 'last_name' not in existing_columns:
+        op.add_column('user_invitations', sa.Column('last_name', sa.String(), nullable=True))
+
+    # Add phone if missing
+    if 'phone' not in existing_columns:
+        op.add_column('user_invitations', sa.Column('phone', sa.String(), nullable=True))
 
 
 def downgrade() -> None:
@@ -22,16 +22,26 @@ depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
-    """Add file_size_bytes column to document tables"""
+    """Add file_size_bytes column to document tables (skip if already exists)"""
+    from sqlalchemy import inspect
 
-    # Add to newsletter_archives
-    op.add_column('newsletter_archives', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
+    conn = op.get_bind()
+    inspector = inspect(conn)
 
-    # Add to financial_reports
-    op.add_column('financial_reports', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
+    # Add to newsletter_archives if missing
+    existing_columns = {col['name'] for col in inspector.get_columns('newsletter_archives')}
+    if 'file_size_bytes' not in existing_columns:
+        op.add_column('newsletter_archives', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
 
-    # Add to bylaws_documents
-    op.add_column('bylaws_documents', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
+    # Add to financial_reports if missing
+    existing_columns = {col['name'] for col in inspector.get_columns('financial_reports')}
+    if 'file_size_bytes' not in existing_columns:
+        op.add_column('financial_reports', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
+
+    # Add to bylaws_documents if missing
+    existing_columns = {col['name'] for col in inspector.get_columns('bylaws_documents')}
+    if 'file_size_bytes' not in existing_columns:
+        op.add_column('bylaws_documents', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
 
 
 def downgrade() -> None:
@@ -22,26 +22,44 @@ depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
-    """Add missing columns and fix naming"""
+    """Add missing columns and fix naming (skip if already exists)"""
+    from sqlalchemy import inspect
 
-    # Add missing columns to subscriptions table
-    op.add_column('subscriptions', sa.Column('start_date', sa.DateTime(timezone=True), nullable=True))
-    op.add_column('subscriptions', sa.Column('end_date', sa.DateTime(timezone=True), nullable=True))
-    op.add_column('subscriptions', sa.Column('amount_paid_cents', sa.Integer(), nullable=True))
-    op.add_column('subscriptions', sa.Column('manual_payment_notes', sa.Text(), nullable=True))
-    op.add_column('subscriptions', sa.Column('manual_payment_admin_id', UUID(as_uuid=True), nullable=True))
-    op.add_column('subscriptions', sa.Column('manual_payment_date', sa.DateTime(timezone=True), nullable=True))
-    op.add_column('subscriptions', sa.Column('payment_method', sa.String(50), nullable=True))
+    conn = op.get_bind()
+    inspector = inspect(conn)
 
-    # Add foreign key for manual_payment_admin_id
-    op.create_foreign_key(
-        'subscriptions_manual_payment_admin_id_fkey',
-        'subscriptions', 'users',
-        ['manual_payment_admin_id'], ['id']
-    )
+    # Check existing columns in subscriptions table
+    existing_columns = {col['name'] for col in inspector.get_columns('subscriptions')}
 
-    # Rename storage_usage.last_calculated_at to last_updated
-    op.alter_column('storage_usage', 'last_calculated_at', new_column_name='last_updated')
+    # Add missing columns to subscriptions table only if they don't exist
+    if 'start_date' not in existing_columns:
+        op.add_column('subscriptions', sa.Column('start_date', sa.DateTime(timezone=True), nullable=True))
+    if 'end_date' not in existing_columns:
+        op.add_column('subscriptions', sa.Column('end_date', sa.DateTime(timezone=True), nullable=True))
+    if 'amount_paid_cents' not in existing_columns:
+        op.add_column('subscriptions', sa.Column('amount_paid_cents', sa.Integer(), nullable=True))
+    if 'manual_payment_notes' not in existing_columns:
+        op.add_column('subscriptions', sa.Column('manual_payment_notes', sa.Text(), nullable=True))
+    if 'manual_payment_admin_id' not in existing_columns:
+        op.add_column('subscriptions', sa.Column('manual_payment_admin_id', UUID(as_uuid=True), nullable=True))
+    if 'manual_payment_date' not in existing_columns:
+        op.add_column('subscriptions', sa.Column('manual_payment_date', sa.DateTime(timezone=True), nullable=True))
+    if 'payment_method' not in existing_columns:
+        op.add_column('subscriptions', sa.Column('payment_method', sa.String(50), nullable=True))
+
+    # Add foreign key for manual_payment_admin_id if it doesn't exist
+    existing_fks = [fk['name'] for fk in inspector.get_foreign_keys('subscriptions')]
+    if 'subscriptions_manual_payment_admin_id_fkey' not in existing_fks:
+        op.create_foreign_key(
+            'subscriptions_manual_payment_admin_id_fkey',
+            'subscriptions', 'users',
+            ['manual_payment_admin_id'], ['id']
+        )
+
+    # Rename storage_usage.last_calculated_at to last_updated (only if needed)
+    storage_columns = {col['name'] for col in inspector.get_columns('storage_usage')}
+    if 'last_calculated_at' in storage_columns and 'last_updated' not in storage_columns:
+        op.alter_column('storage_usage', 'last_calculated_at', new_column_name='last_updated')
 
 
 def downgrade() -> None:
alembic/versions/006_rename_is_active.py (new file, 37 lines)
@@ -0,0 +1,37 @@
"""rename_is_active

Revision ID: 006_rename_active
Revises: 005_fix_subs_storage
Create Date: 2026-01-04

Fixes:
- Rename subscription_plans.is_active to active (match models.py)
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '006_rename_active'
down_revision: Union[str, None] = '005_fix_subs_storage'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Rename is_active to active (skip if already renamed)"""
    from sqlalchemy import inspect

    conn = op.get_bind()
    inspector = inspect(conn)

    # Check if rename is needed
    existing_columns = {col['name'] for col in inspector.get_columns('subscription_plans')}
    if 'is_active' in existing_columns and 'active' not in existing_columns:
        op.alter_column('subscription_plans', 'is_active', new_column_name='active')


def downgrade() -> None:
    """Rename back to is_active"""
    op.alter_column('subscription_plans', 'active', new_column_name='is_active')
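One asymmetry worth noting in this file: upgrade() skips the rename when it has already happened, but downgrade() renames unconditionally and would fail wherever the upgrade was a no-op. A symmetric guard is a small change; a sketch (not part of the diff):

```python
def downgrade() -> None:
    """Rename back to is_active (skip if never renamed)"""
    from sqlalchemy import inspect

    # Mirror the upgrade's check so downgrade also no-ops safely.
    inspector = inspect(op.get_bind())
    cols = {col['name'] for col in inspector.get_columns('subscription_plans')}
    if 'active' in cols and 'is_active' not in cols:
        op.alter_column('subscription_plans', 'active', new_column_name='is_active')
```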
alembic/versions/007_add_subscription_plan_fields.py (new file, 65 lines)
@@ -0,0 +1,65 @@
"""add_subscription_plan_fields

Revision ID: 007_add_sub_fields
Revises: 006_rename_active
Create Date: 2026-01-04

Fixes:
- Add missing columns to subscription_plans table
  (custom cycle fields, dynamic pricing fields)
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '007_add_sub_fields'
down_revision: Union[str, None] = '006_rename_active'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add missing columns to subscription_plans (skip if already exists)"""
    from alembic import op
    import sqlalchemy as sa
    from sqlalchemy import inspect

    # Get database connection
    conn = op.get_bind()
    inspector = inspect(conn)
    existing_columns = {col['name'] for col in inspector.get_columns('subscription_plans')}

    # Custom billing cycle fields
    if 'custom_cycle_enabled' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('custom_cycle_enabled', sa.Boolean(), nullable=False, server_default='false'))
    if 'custom_cycle_start_month' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('custom_cycle_start_month', sa.Integer(), nullable=True))
    if 'custom_cycle_start_day' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('custom_cycle_start_day', sa.Integer(), nullable=True))
    if 'custom_cycle_end_month' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('custom_cycle_end_month', sa.Integer(), nullable=True))
    if 'custom_cycle_end_day' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('custom_cycle_end_day', sa.Integer(), nullable=True))

    # Dynamic pricing fields
    if 'minimum_price_cents' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('minimum_price_cents', sa.Integer(), nullable=False, server_default='3000'))
    if 'suggested_price_cents' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('suggested_price_cents', sa.Integer(), nullable=True))
    if 'allow_donation' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('allow_donation', sa.Boolean(), nullable=False, server_default='true'))


def downgrade() -> None:
    """Remove added columns (rollback)"""

    op.drop_column('subscription_plans', 'allow_donation')
    op.drop_column('subscription_plans', 'suggested_price_cents')
    op.drop_column('subscription_plans', 'minimum_price_cents')
    op.drop_column('subscription_plans', 'custom_cycle_end_day')
    op.drop_column('subscription_plans', 'custom_cycle_end_month')
    op.drop_column('subscription_plans', 'custom_cycle_start_day')
    op.drop_column('subscription_plans', 'custom_cycle_start_month')
    op.drop_column('subscription_plans', 'custom_cycle_enabled')
alembic/versions/008_add_donation_columns.py (new file, 55 lines)
@@ -0,0 +1,55 @@
"""add_donation_columns

Revision ID: 008_add_donations
Revises: 007_add_sub_fields
Create Date: 2026-01-04

Fixes:
- Add missing Stripe payment columns to donations table
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect


# revision identifiers, used by Alembic.
revision: str = '008_add_donations'
down_revision: Union[str, None] = '007_add_sub_fields'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add missing columns to donations table (skip if already exists)"""

    # Get database connection
    conn = op.get_bind()
    inspector = inspect(conn)
    existing_columns = {col['name'] for col in inspector.get_columns('donations')}

    # Stripe payment columns
    if 'stripe_checkout_session_id' not in existing_columns:
        op.add_column('donations', sa.Column('stripe_checkout_session_id', sa.String(), nullable=True))

    if 'stripe_payment_intent_id' not in existing_columns:
        op.add_column('donations', sa.Column('stripe_payment_intent_id', sa.String(), nullable=True))

    if 'payment_method' not in existing_columns:
        op.add_column('donations', sa.Column('payment_method', sa.String(), nullable=True))

    if 'notes' not in existing_columns:
        op.add_column('donations', sa.Column('notes', sa.Text(), nullable=True))

    if 'updated_at' not in existing_columns:
        op.add_column('donations', sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True))


def downgrade() -> None:
    """Remove added columns (rollback)"""

    op.drop_column('donations', 'updated_at')
    op.drop_column('donations', 'notes')
    op.drop_column('donations', 'payment_method')
    op.drop_column('donations', 'stripe_payment_intent_id')
    op.drop_column('donations', 'stripe_checkout_session_id')
alembic/versions/009_add_all_missing_columns.py (new file, 237 lines)
@@ -0,0 +1,237 @@
"""add_all_missing_columns

Revision ID: 009_add_all_missing
Revises: 008_add_donations
Create Date: 2026-01-04

Fixes:
- Add ALL remaining missing columns across all tables
- Users: newsletter preferences, volunteer, scholarship, directory, password reset, ToS, member_since, reminders, rejection, import tracking
- Events: calendar_uid
- Subscriptions: base_subscription_cents, donation_cents, manual_payment
- ImportJobs: WordPress import fields
- Create ImportRollbackAudit table if not exists
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy import inspect


# revision identifiers, used by Alembic.
revision: str = '009_add_all_missing'
down_revision: Union[str, None] = '008_add_donations'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add all missing columns across all tables"""

    conn = op.get_bind()
    inspector = inspect(conn)

    # ============================================================
    # 1. USERS TABLE - Add ~28 missing columns
    # ============================================================
    users_columns = {col['name'] for col in inspector.get_columns('users')}

    # Newsletter publication preferences
    if 'newsletter_publish_name' not in users_columns:
        op.add_column('users', sa.Column('newsletter_publish_name', sa.Boolean(), nullable=False, server_default='false'))
    if 'newsletter_publish_photo' not in users_columns:
        op.add_column('users', sa.Column('newsletter_publish_photo', sa.Boolean(), nullable=False, server_default='false'))
    if 'newsletter_publish_birthday' not in users_columns:
        op.add_column('users', sa.Column('newsletter_publish_birthday', sa.Boolean(), nullable=False, server_default='false'))
    if 'newsletter_publish_none' not in users_columns:
        op.add_column('users', sa.Column('newsletter_publish_none', sa.Boolean(), nullable=False, server_default='false'))

    # Volunteer interests
    if 'volunteer_interests' not in users_columns:
        op.add_column('users', sa.Column('volunteer_interests', sa.JSON(), nullable=True, server_default='[]'))

    # Scholarship
    if 'scholarship_requested' not in users_columns:
        op.add_column('users', sa.Column('scholarship_requested', sa.Boolean(), nullable=False, server_default='false'))

    # Directory
    if 'show_in_directory' not in users_columns:
        op.add_column('users', sa.Column('show_in_directory', sa.Boolean(), nullable=False, server_default='false'))

    # Password reset
    if 'password_reset_token' not in users_columns:
        op.add_column('users', sa.Column('password_reset_token', sa.String(), nullable=True))
    if 'password_reset_expires' not in users_columns:
        op.add_column('users', sa.Column('password_reset_expires', sa.DateTime(), nullable=True))
    if 'force_password_change' not in users_columns:
        op.add_column('users', sa.Column('force_password_change', sa.Boolean(), nullable=False, server_default='false'))

    # Terms of Service
    if 'accepts_tos' not in users_columns:
        op.add_column('users', sa.Column('accepts_tos', sa.Boolean(), nullable=False, server_default='false'))
    if 'tos_accepted_at' not in users_columns:
        op.add_column('users', sa.Column('tos_accepted_at', sa.DateTime(), nullable=True))

    # Member since
    if 'member_since' not in users_columns:
        op.add_column('users', sa.Column('member_since', sa.DateTime(), nullable=True))

    # Email verification reminders
    if 'email_verification_reminders_sent' not in users_columns:
        op.add_column('users', sa.Column('email_verification_reminders_sent', sa.Integer(), nullable=False, server_default='0'))
    if 'last_email_verification_reminder_at' not in users_columns:
        op.add_column('users', sa.Column('last_email_verification_reminder_at', sa.DateTime(), nullable=True))

    # Event attendance reminders
    if 'event_attendance_reminders_sent' not in users_columns:
        op.add_column('users', sa.Column('event_attendance_reminders_sent', sa.Integer(), nullable=False, server_default='0'))
    if 'last_event_attendance_reminder_at' not in users_columns:
        op.add_column('users', sa.Column('last_event_attendance_reminder_at', sa.DateTime(), nullable=True))

    # Payment reminders
    if 'payment_reminders_sent' not in users_columns:
        op.add_column('users', sa.Column('payment_reminders_sent', sa.Integer(), nullable=False, server_default='0'))
    if 'last_payment_reminder_at' not in users_columns:
        op.add_column('users', sa.Column('last_payment_reminder_at', sa.DateTime(), nullable=True))

    # Renewal reminders
    if 'renewal_reminders_sent' not in users_columns:
        op.add_column('users', sa.Column('renewal_reminders_sent', sa.Integer(), nullable=False, server_default='0'))
    if 'last_renewal_reminder_at' not in users_columns:
        op.add_column('users', sa.Column('last_renewal_reminder_at', sa.DateTime(), nullable=True))

    # Rejection tracking
    if 'rejection_reason' not in users_columns:
        op.add_column('users', sa.Column('rejection_reason', sa.Text(), nullable=True))
    if 'rejected_at' not in users_columns:
        op.add_column('users', sa.Column('rejected_at', sa.DateTime(timezone=True), nullable=True))
    if 'rejected_by' not in users_columns:
        op.add_column('users', sa.Column('rejected_by', UUID(as_uuid=True), nullable=True))
        # Note: Foreign key constraint skipped to avoid circular dependency issues

    # WordPress import tracking
    if 'import_source' not in users_columns:
        op.add_column('users', sa.Column('import_source', sa.String(50), nullable=True))
    if 'import_job_id' not in users_columns:
        op.add_column('users', sa.Column('import_job_id', UUID(as_uuid=True), nullable=True))
        # Note: Foreign key will be added after import_jobs table is updated
    if 'wordpress_user_id' not in users_columns:
        op.add_column('users', sa.Column('wordpress_user_id', sa.BigInteger(), nullable=True))
    if 'wordpress_registered_date' not in users_columns:
        op.add_column('users', sa.Column('wordpress_registered_date', sa.DateTime(timezone=True), nullable=True))

    # ============================================================
    # 2. EVENTS TABLE - Add calendar_uid
    # ============================================================
    events_columns = {col['name'] for col in inspector.get_columns('events')}

    if 'calendar_uid' not in events_columns:
        op.add_column('events', sa.Column('calendar_uid', sa.String(), nullable=True))

    # ============================================================
    # 3. SUBSCRIPTIONS TABLE - Add donation tracking
    # ============================================================
    subscriptions_columns = {col['name'] for col in inspector.get_columns('subscriptions')}

    if 'base_subscription_cents' not in subscriptions_columns:
        op.add_column('subscriptions', sa.Column('base_subscription_cents', sa.Integer(), nullable=True))
        # Update existing rows: base_subscription_cents = amount_paid_cents - donation_cents (default 0)
        op.execute("UPDATE subscriptions SET base_subscription_cents = COALESCE(amount_paid_cents, 0) WHERE base_subscription_cents IS NULL")
        # Make it non-nullable after populating
        op.alter_column('subscriptions', 'base_subscription_cents', nullable=False)

    if 'donation_cents' not in subscriptions_columns:
        op.add_column('subscriptions', sa.Column('donation_cents', sa.Integer(), nullable=False, server_default='0'))

    if 'manual_payment' not in subscriptions_columns:
        op.add_column('subscriptions', sa.Column('manual_payment', sa.Boolean(), nullable=False, server_default='false'))

    # ============================================================
    # 4. IMPORT_JOBS TABLE - Add WordPress import fields
    # ============================================================
    import_jobs_columns = {col['name'] for col in inspector.get_columns('import_jobs')}

    if 'field_mapping' not in import_jobs_columns:
        op.add_column('import_jobs', sa.Column('field_mapping', sa.JSON(), nullable=False, server_default='{}'))

    if 'wordpress_metadata' not in import_jobs_columns:
        op.add_column('import_jobs', sa.Column('wordpress_metadata', sa.JSON(), nullable=False, server_default='{}'))

    if 'imported_user_ids' not in import_jobs_columns:
        op.add_column('import_jobs', sa.Column('imported_user_ids', sa.JSON(), nullable=False, server_default='[]'))

    if 'rollback_at' not in import_jobs_columns:
        op.add_column('import_jobs', sa.Column('rollback_at', sa.DateTime(), nullable=True))

    if 'rollback_by' not in import_jobs_columns:
        op.add_column('import_jobs', sa.Column('rollback_by', UUID(as_uuid=True), nullable=True))
        # Foreign key will be added if needed

    # ============================================================
    # 5. CREATE IMPORT_ROLLBACK_AUDIT TABLE
    # ============================================================
    if 'import_rollback_audit' not in inspector.get_table_names():
        op.create_table(
            'import_rollback_audit',
            sa.Column('id', UUID(as_uuid=True), primary_key=True),
            sa.Column('import_job_id', UUID(as_uuid=True), sa.ForeignKey('import_jobs.id'), nullable=False),
            sa.Column('rolled_back_by', UUID(as_uuid=True), sa.ForeignKey('users.id'), nullable=False),
            sa.Column('rolled_back_at', sa.DateTime(), nullable=False),
            sa.Column('deleted_user_count', sa.Integer(), nullable=False),
            sa.Column('deleted_user_ids', sa.JSON(), nullable=False),
            sa.Column('reason', sa.Text(), nullable=True),
            sa.Column('created_at', sa.DateTime(), nullable=False)
        )


def downgrade() -> None:
    """Remove all added columns and tables"""

    # Drop import_rollback_audit table
    op.drop_table('import_rollback_audit')

    # Drop import_jobs columns
    op.drop_column('import_jobs', 'rollback_by')
    op.drop_column('import_jobs', 'rollback_at')
    op.drop_column('import_jobs', 'imported_user_ids')
    op.drop_column('import_jobs', 'wordpress_metadata')
    op.drop_column('import_jobs', 'field_mapping')

    # Drop subscriptions columns
    op.drop_column('subscriptions', 'manual_payment')
    op.drop_column('subscriptions', 'donation_cents')
    op.drop_column('subscriptions', 'base_subscription_cents')

    # Drop events columns
    op.drop_column('events', 'calendar_uid')

    # Drop users columns (in reverse order)
    op.drop_column('users', 'wordpress_registered_date')
    op.drop_column('users', 'wordpress_user_id')
    op.drop_column('users', 'import_job_id')
    op.drop_column('users', 'import_source')
    op.drop_column('users', 'rejected_by')
    op.drop_column('users', 'rejected_at')
    op.drop_column('users', 'rejection_reason')
    op.drop_column('users', 'last_renewal_reminder_at')
    op.drop_column('users', 'renewal_reminders_sent')
    op.drop_column('users', 'last_payment_reminder_at')
    op.drop_column('users', 'payment_reminders_sent')
    op.drop_column('users', 'last_event_attendance_reminder_at')
    op.drop_column('users', 'event_attendance_reminders_sent')
    op.drop_column('users', 'last_email_verification_reminder_at')
    op.drop_column('users', 'email_verification_reminders_sent')
    op.drop_column('users', 'member_since')
    op.drop_column('users', 'tos_accepted_at')
    op.drop_column('users', 'accepts_tos')
    op.drop_column('users', 'force_password_change')
    op.drop_column('users', 'password_reset_expires')
    op.drop_column('users', 'password_reset_token')
    op.drop_column('users', 'show_in_directory')
    op.drop_column('users', 'scholarship_requested')
    op.drop_column('users', 'volunteer_interests')
    op.drop_column('users', 'newsletter_publish_none')
    op.drop_column('users', 'newsletter_publish_birthday')
    op.drop_column('users', 'newsletter_publish_photo')
    op.drop_column('users', 'newsletter_publish_name')
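Migration 009 uses two distinct strategies for adding NOT NULL columns to populated tables, and the distinction is worth calling out: columns with constant defaults rely on `server_default`, which fills existing rows during the ALTER, while `base_subscription_cents` needs a computed value and therefore goes through an add-backfill-tighten sequence. A condensed sketch of both, with column names taken from the migration above:

```python
from alembic import op
import sqlalchemy as sa

# 1) Constant default: server_default populates existing rows at DDL time,
#    so the column can be NOT NULL from the start.
op.add_column('users', sa.Column('accepts_tos', sa.Boolean(),
                                 nullable=False, server_default='false'))

# 2) Computed value: add as nullable, backfill with SQL, then tighten.
op.add_column('subscriptions', sa.Column('base_subscription_cents',
                                         sa.Integer(), nullable=True))
op.execute(
    "UPDATE subscriptions SET base_subscription_cents = COALESCE(amount_paid_cents, 0) "
    "WHERE base_subscription_cents IS NULL"
)
op.alter_column('subscriptions', 'base_subscription_cents', nullable=False)
```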
alembic/versions/010_add_email_verification_expires.py (new file, 37 lines)
@@ -0,0 +1,37 @@
"""add_email_verification_expires

Revision ID: 010_add_email_exp
Revises: 009_add_all_missing
Create Date: 2026-01-05

Fixes:
- Add missing email_verification_expires column to users table
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '010_add_email_exp'
down_revision: Union[str, None] = '009_add_all_missing'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add email_verification_expires column (skip if already exists)"""
    from sqlalchemy import inspect

    conn = op.get_bind()
    inspector = inspect(conn)
    existing_columns = {col['name'] for col in inspector.get_columns('users')}

    # Add email_verification_expires if missing
    if 'email_verification_expires' not in existing_columns:
        op.add_column('users', sa.Column('email_verification_expires', sa.DateTime(), nullable=True))


def downgrade() -> None:
    """Remove email_verification_expires column"""
    op.drop_column('users', 'email_verification_expires')
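With the revision chain 006 through 010 in place, these migrations apply in order through the standard Alembic CLI: `alembic upgrade head` runs every pending revision, and `alembic downgrade -1` steps back one. Because each upgrade() inspects the live schema before acting, re-running `alembic upgrade head` against a database that already has the columns is safe by design.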
alembic/versions/011_align_prod_with_dev.py (new file, 410 lines)
@@ -0,0 +1,410 @@
|
||||
"""align_prod_with_dev
|
||||
|
||||
Revision ID: 011_align_prod_dev
|
||||
Revises: 010_add_email_exp
|
||||
Create Date: 2026-01-05
|
||||
|
||||
Aligns PROD database schema with DEV database schema (source of truth).
|
||||
Fixes type mismatches, removes PROD-only columns, adds DEV-only columns, updates nullable constraints.
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import JSONB, JSON
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '011_align_prod_dev'
|
||||
down_revision: Union[str, None] = '010_add_email_exp'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Align PROD schema with DEV schema (source of truth)"""
|
||||
from sqlalchemy import inspect
|
||||
|
||||
conn = op.get_bind()
|
||||
inspector = inspect(conn)
|
||||
|
||||
print("Starting schema alignment: PROD → DEV (source of truth)...")
|
||||
|
||||
# ============================================================
|
||||
# 1. FIX USERS TABLE
|
||||
# ============================================================
|
||||
print("\n[1/14] Fixing users table...")
|
||||
|
||||
users_columns = {col['name'] for col in inspector.get_columns('users')}
|
||||
|
||||
# Remove PROD-only columns (not in models.py or DEV)
|
||||
if 'bio' in users_columns:
|
||||
op.drop_column('users', 'bio')
|
||||
print(" ✓ Removed users.bio (PROD-only)")
|
||||
|
||||
if 'interests' in users_columns:
|
||||
op.drop_column('users', 'interests')
|
||||
print(" ✓ Removed users.interests (PROD-only)")
|
||||
|
||||
try:
|
||||
# Change constrained VARCHAR(n) to unconstrained VARCHAR
|
||||
op.alter_column('users', 'first_name', type_=sa.String(), postgresql_using='first_name::varchar')
|
||||
op.alter_column('users', 'last_name', type_=sa.String(), postgresql_using='last_name::varchar')
|
||||
op.alter_column('users', 'email', type_=sa.String(), postgresql_using='email::varchar')
|
||||
op.alter_column('users', 'phone', type_=sa.String(), postgresql_using='phone::varchar')
|
||||
op.alter_column('users', 'city', type_=sa.String(), postgresql_using='city::varchar')
|
||||
op.alter_column('users', 'state', type_=sa.String(), postgresql_using='state::varchar')
|
||||
op.alter_column('users', 'zipcode', type_=sa.String(), postgresql_using='zipcode::varchar')
|
||||
op.alter_column('users', 'partner_first_name', type_=sa.String(), postgresql_using='partner_first_name::varchar')
|
||||
op.alter_column('users', 'partner_last_name', type_=sa.String(), postgresql_using='partner_last_name::varchar')
|
||||
op.alter_column('users', 'referred_by_member_name', type_=sa.String(), postgresql_using='referred_by_member_name::varchar')
|
||||
op.alter_column('users', 'password_hash', type_=sa.String(), postgresql_using='password_hash::varchar')
|
||||
op.alter_column('users', 'email_verification_token', type_=sa.String(), postgresql_using='email_verification_token::varchar')
|
||||
op.alter_column('users', 'password_reset_token', type_=sa.String(), postgresql_using='password_reset_token::varchar')
|
||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
||||
|
||||
# Change TEXT to VARCHAR
|
||||
op.alter_column('users', 'address', type_=sa.String(), postgresql_using='address::varchar')
|
||||
op.alter_column('users', 'profile_photo_url', type_=sa.String(), postgresql_using='profile_photo_url::varchar')
|
||||
print(" ✓ Changed TEXT to VARCHAR")
|
||||
|
||||
# Change DATE to TIMESTAMP
|
||||
op.alter_column('users', 'date_of_birth', type_=sa.DateTime(), postgresql_using='date_of_birth::timestamp')
|
||||
op.alter_column('users', 'member_since', type_=sa.DateTime(), postgresql_using='member_since::timestamp')
|
||||
print(" ✓ Changed DATE to TIMESTAMP")
|
||||
|
||||
# Change JSONB to JSON
|
||||
op.alter_column('users', 'lead_sources', type_=JSON(), postgresql_using='lead_sources::json')
|
||||
print(" ✓ Changed lead_sources JSONB to JSON")
|
||||
|
||||
# Change TEXT to JSON for volunteer_interests
|
||||
op.alter_column('users', 'volunteer_interests', type_=JSON(), postgresql_using='volunteer_interests::json')
|
||||
print(" ✓ Changed volunteer_interests TEXT to JSON")
|
||||
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: Some type conversions failed: {e}")
|
||||
|
||||
# Fill NULL values with defaults BEFORE setting NOT NULL constraints
|
||||
print(" ⏳ Filling NULL values with defaults...")
|
||||
|
||||
# Update string fields
|
||||
conn.execute(sa.text("UPDATE users SET address = '' WHERE address IS NULL"))
|
||||
conn.execute(sa.text("UPDATE users SET city = '' WHERE city IS NULL"))
|
||||
conn.execute(sa.text("UPDATE users SET state = '' WHERE state IS NULL"))
|
||||
conn.execute(sa.text("UPDATE users SET zipcode = '' WHERE zipcode IS NULL"))
|
||||
conn.execute(sa.text("UPDATE users SET phone = '' WHERE phone IS NULL"))
|
||||
|
||||
# Update date_of_birth with sentinel date
|
||||
conn.execute(sa.text("UPDATE users SET date_of_birth = '1900-01-01'::timestamp WHERE date_of_birth IS NULL"))
|
||||
|
||||
# Update boolean fields
|
||||
conn.execute(sa.text("UPDATE users SET show_in_directory = false WHERE show_in_directory IS NULL"))
|
||||
conn.execute(sa.text("UPDATE users SET newsletter_publish_name = false WHERE newsletter_publish_name IS NULL"))
|
||||
conn.execute(sa.text("UPDATE users SET newsletter_publish_birthday = false WHERE newsletter_publish_birthday IS NULL"))
|
||||
conn.execute(sa.text("UPDATE users SET newsletter_publish_photo = false WHERE newsletter_publish_photo IS NULL"))
|
||||
conn.execute(sa.text("UPDATE users SET newsletter_publish_none = false WHERE newsletter_publish_none IS NULL"))
|
||||
conn.execute(sa.text("UPDATE users SET force_password_change = false WHERE force_password_change IS NULL"))
|
||||
conn.execute(sa.text("UPDATE users SET scholarship_requested = false WHERE scholarship_requested IS NULL"))
|
||||
conn.execute(sa.text("UPDATE users SET accepts_tos = false WHERE accepts_tos IS NULL"))
|
||||
|
||||
# Check how many rows were updated
|
||||
null_check = conn.execute(sa.text("""
|
||||
SELECT
|
||||
COUNT(*) FILTER (WHERE address = '') as address_filled,
|
||||
COUNT(*) FILTER (WHERE date_of_birth = '1900-01-01'::timestamp) as dob_filled
|
||||
FROM users
|
||||
""")).fetchone()
|
||||
print(f" ✓ Filled NULLs: {null_check[0]} addresses, {null_check[1]} dates of birth")
|
||||
|
||||
# Now safe to set NOT NULL constraints
|
||||
op.alter_column('users', 'address', nullable=False)
|
||||
op.alter_column('users', 'city', nullable=False)
|
||||
op.alter_column('users', 'state', nullable=False)
|
||||
op.alter_column('users', 'zipcode', nullable=False)
|
||||
op.alter_column('users', 'phone', nullable=False)
|
||||
op.alter_column('users', 'date_of_birth', nullable=False)
|
||||
op.alter_column('users', 'show_in_directory', nullable=False)
|
||||
op.alter_column('users', 'newsletter_publish_name', nullable=False)
|
||||
op.alter_column('users', 'newsletter_publish_birthday', nullable=False)
|
||||
op.alter_column('users', 'newsletter_publish_photo', nullable=False)
|
||||
op.alter_column('users', 'newsletter_publish_none', nullable=False)
|
||||
op.alter_column('users', 'force_password_change', nullable=False)
|
||||
op.alter_column('users', 'scholarship_requested', nullable=False)
|
||||
op.alter_column('users', 'accepts_tos', nullable=False)
|
||||
print(" ✓ Set NOT NULL constraints")
|
||||
|
||||
# ============================================================
|
||||
# 2. FIX DONATIONS TABLE
|
||||
# ============================================================
|
||||
print("\n[2/14] Fixing donations table...")
|
||||
|
||||
donations_columns = {col['name'] for col in inspector.get_columns('donations')}
|
||||
|
||||
# Remove PROD-only columns
|
||||
if 'is_anonymous' in donations_columns:
|
||||
op.drop_column('donations', 'is_anonymous')
|
||||
print(" ✓ Removed donations.is_anonymous (PROD-only)")
|
||||
|
||||
if 'completed_at' in donations_columns:
|
||||
op.drop_column('donations', 'completed_at')
|
||||
print(" ✓ Removed donations.completed_at (PROD-only)")
|
||||
|
||||
if 'message' in donations_columns:
|
||||
op.drop_column('donations', 'message')
|
||||
print(" ✓ Removed donations.message (PROD-only)")
|
||||
|
||||
try:
|
||||
op.alter_column('donations', 'donor_email', type_=sa.String(), postgresql_using='donor_email::varchar')
|
||||
op.alter_column('donations', 'donor_name', type_=sa.String(), postgresql_using='donor_name::varchar')
|
||||
op.alter_column('donations', 'stripe_payment_intent_id', type_=sa.String(), postgresql_using='stripe_payment_intent_id::varchar')
|
||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: Type conversion failed: {e}")
|
||||
|
||||
# ============================================================
|
||||
# 3. FIX SUBSCRIPTIONS TABLE
|
||||
# ============================================================
|
||||
print("\n[3/14] Fixing subscriptions table...")
|
||||
|
||||
subscriptions_columns = {col['name'] for col in inspector.get_columns('subscriptions')}
|
||||
|
||||
# Remove PROD-only columns
|
||||
if 'cancel_at_period_end' in subscriptions_columns:
|
||||
op.drop_column('subscriptions', 'cancel_at_period_end')
|
||||
print(" ✓ Removed subscriptions.cancel_at_period_end (PROD-only)")
|
||||
|
||||
if 'canceled_at' in subscriptions_columns:
|
||||
op.drop_column('subscriptions', 'canceled_at')
|
||||
print(" ✓ Removed subscriptions.canceled_at (PROD-only)")
|
||||
|
||||
if 'current_period_start' in subscriptions_columns:
|
||||
op.drop_column('subscriptions', 'current_period_start')
|
||||
print(" ✓ Removed subscriptions.current_period_start (PROD-only)")
|
||||
|
||||
if 'current_period_end' in subscriptions_columns:
|
||||
op.drop_column('subscriptions', 'current_period_end')
|
||||
print(" ✓ Removed subscriptions.current_period_end (PROD-only)")
|
||||
|
||||
try:
|
||||
op.alter_column('subscriptions', 'stripe_subscription_id', type_=sa.String(), postgresql_using='stripe_subscription_id::varchar')
|
||||
op.alter_column('subscriptions', 'stripe_customer_id', type_=sa.String(), postgresql_using='stripe_customer_id::varchar')
|
||||
op.alter_column('subscriptions', 'payment_method', type_=sa.String(), postgresql_using='payment_method::varchar')
|
||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: Type conversion failed: {e}")
|
||||
|
||||
# Fix nullable constraints
|
||||
op.alter_column('subscriptions', 'start_date', nullable=False)
|
||||
op.alter_column('subscriptions', 'manual_payment', nullable=False)
|
||||
op.alter_column('subscriptions', 'donation_cents', nullable=False)
|
||||
op.alter_column('subscriptions', 'base_subscription_cents', nullable=False)
|
||||
print(" ✓ Fixed nullable constraints")
|
||||
|
||||
# ============================================================
|
||||
# 4. FIX STORAGE_USAGE TABLE
|
||||
# ============================================================
|
||||
print("\n[4/14] Fixing storage_usage table...")
|
||||
|
||||
storage_columns = {col['name'] for col in inspector.get_columns('storage_usage')}
|
||||
|
||||
# Remove PROD-only columns
|
||||
if 'created_at' in storage_columns:
|
||||
op.drop_column('storage_usage', 'created_at')
|
||||
print(" ✓ Removed storage_usage.created_at (PROD-only)")
|
||||
|
||||
if 'updated_at' in storage_columns:
|
||||
op.drop_column('storage_usage', 'updated_at')
|
||||
print(" ✓ Removed storage_usage.updated_at (PROD-only)")
|
||||
|
||||
op.alter_column('storage_usage', 'max_bytes_allowed', nullable=False)
|
||||
print(" ✓ Fixed nullable constraint")
|
||||
|
||||
# ============================================================
|
||||
# 5. FIX EVENT_GALLERIES TABLE (Add missing DEV columns)
|
||||
# ============================================================
|
||||
print("\n[5/14] Fixing event_galleries table...")
|
||||
|
||||
event_galleries_columns = {col['name'] for col in inspector.get_columns('event_galleries')}
|
||||
|
||||
# Add DEV-only columns (exist in models.py but not in PROD)
|
||||
if 'image_key' not in event_galleries_columns:
|
||||
op.add_column('event_galleries', sa.Column('image_key', sa.String(), nullable=False, server_default=''))
|
||||
print(" ✓ Added event_galleries.image_key")
|
||||
|
||||
if 'file_size_bytes' not in event_galleries_columns:
|
||||
op.add_column('event_galleries', sa.Column('file_size_bytes', sa.Integer(), nullable=False, server_default='0'))
|
||||
print(" ✓ Added event_galleries.file_size_bytes")
|
||||
|
||||
try:
|
||||
op.alter_column('event_galleries', 'image_url', type_=sa.String(), postgresql_using='image_url::varchar')
|
||||
print(" ✓ Changed TEXT to VARCHAR")
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: Type conversion failed: {e}")
|
||||
|
||||
# Note: uploaded_by column already has correct nullable=False in both DEV and PROD
|
||||
|
||||
# ============================================================
|
||||
# 6. FIX BYLAWS_DOCUMENTS TABLE
|
||||
# ============================================================
|
||||
print("\n[6/14] Fixing bylaws_documents table...")
|
||||
|
||||
bylaws_columns = {col['name'] for col in inspector.get_columns('bylaws_documents')}
|
||||
|
||||
# Remove PROD-only column
|
||||
if 'updated_at' in bylaws_columns:
|
||||
op.drop_column('bylaws_documents', 'updated_at')
|
||||
print(" ✓ Removed bylaws_documents.updated_at (PROD-only)")
|
||||
|
||||
try:
|
||||
op.alter_column('bylaws_documents', 'title', type_=sa.String(), postgresql_using='title::varchar')
|
||||
op.alter_column('bylaws_documents', 'version', type_=sa.String(), postgresql_using='version::varchar')
|
||||
op.alter_column('bylaws_documents', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
|
||||
op.alter_column('bylaws_documents', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
|
||||
print(" ✓ Changed column types")
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: Type conversion failed: {e}")
|
||||
|
||||
op.alter_column('bylaws_documents', 'document_type', nullable=True)
|
||||
print(" ✓ Fixed nullable constraint")
|
||||
|
||||
# ============================================================
|
||||
# 7. FIX EVENTS TABLE
|
||||
# ============================================================
|
||||
print("\n[7/14] Fixing events table...")
|
||||
|
||||
try:
|
||||
op.alter_column('events', 'title', type_=sa.String(), postgresql_using='title::varchar')
|
||||
op.alter_column('events', 'location', type_=sa.String(), postgresql_using='location::varchar')
|
||||
op.alter_column('events', 'calendar_uid', type_=sa.String(), postgresql_using='calendar_uid::varchar')
|
||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: {e}")
|
||||
|
||||
op.alter_column('events', 'location', nullable=False)
|
||||
op.alter_column('events', 'created_by', nullable=False)
|
||||
print(" ✓ Fixed nullable constraints")
|
||||
|
||||
# ============================================================
|
||||
# 8. FIX PERMISSIONS TABLE
|
||||
# ============================================================
|
||||
print("\n[8/14] Fixing permissions table...")
|
||||
|
||||
try:
|
||||
op.alter_column('permissions', 'code', type_=sa.String(), postgresql_using='code::varchar')
|
||||
op.alter_column('permissions', 'name', type_=sa.String(), postgresql_using='name::varchar')
|
||||
op.alter_column('permissions', 'module', type_=sa.String(), postgresql_using='module::varchar')
|
||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: {e}")
|
||||
|
||||
op.alter_column('permissions', 'module', nullable=False)
|
||||
print(" ✓ Fixed nullable constraint")
|
||||
|
||||
# ============================================================
|
||||
# 9. FIX ROLES TABLE
|
||||
# ============================================================
|
    print("\n[9/14] Fixing roles table...")

    try:
        op.alter_column('roles', 'code', type_=sa.String(), postgresql_using='code::varchar')
        op.alter_column('roles', 'name', type_=sa.String(), postgresql_using='name::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('roles', 'is_system_role', nullable=False)
    print(" ✓ Fixed nullable constraint")

    # ============================================================
    # 10. FIX USER_INVITATIONS TABLE
    # ============================================================
    print("\n[10/14] Fixing user_invitations table...")

    try:
        op.alter_column('user_invitations', 'email', type_=sa.String(), postgresql_using='email::varchar')
        op.alter_column('user_invitations', 'token', type_=sa.String(), postgresql_using='token::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('user_invitations', 'invited_at', nullable=False)
    print(" ✓ Fixed nullable constraint")

    # ============================================================
    # 11. FIX NEWSLETTER_ARCHIVES TABLE
    # ============================================================
    print("\n[11/14] Fixing newsletter_archives table...")

    try:
        op.alter_column('newsletter_archives', 'title', type_=sa.String(), postgresql_using='title::varchar')
        op.alter_column('newsletter_archives', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
        op.alter_column('newsletter_archives', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
        print(" ✓ Changed column types")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('newsletter_archives', 'document_type', nullable=True)
    print(" ✓ Fixed nullable constraint")

    # ============================================================
    # 12. FIX FINANCIAL_REPORTS TABLE
    # ============================================================
    print("\n[12/14] Fixing financial_reports table...")

    try:
        op.alter_column('financial_reports', 'title', type_=sa.String(), postgresql_using='title::varchar')
        op.alter_column('financial_reports', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
        op.alter_column('financial_reports', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
        print(" ✓ Changed column types")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('financial_reports', 'document_type', nullable=True)
    print(" ✓ Fixed nullable constraint")

    # ============================================================
    # 13. FIX IMPORT_JOBS TABLE
    # ============================================================
    print("\n[13/14] Fixing import_jobs table...")

    try:
        op.alter_column('import_jobs', 'filename', type_=sa.String(), postgresql_using='filename::varchar')
        op.alter_column('import_jobs', 'file_key', type_=sa.String(), postgresql_using='file_key::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")

        # Change JSONB to JSON
        op.alter_column('import_jobs', 'errors', type_=JSON(), postgresql_using='errors::json')
        print(" ✓ Changed errors JSONB to JSON")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    # Fix nullable constraints
    op.alter_column('import_jobs', 'processed_rows', nullable=False)
    op.alter_column('import_jobs', 'successful_rows', nullable=False)
    op.alter_column('import_jobs', 'failed_rows', nullable=False)
    op.alter_column('import_jobs', 'errors', nullable=False)
    op.alter_column('import_jobs', 'started_at', nullable=False)
    print(" ✓ Fixed nullable constraints")

    # ============================================================
    # 14. FIX SUBSCRIPTION_PLANS TABLE
    # ============================================================
    print("\n[14/14] Fixing subscription_plans table...")

    try:
        op.alter_column('subscription_plans', 'name', type_=sa.String(), postgresql_using='name::varchar')
        op.alter_column('subscription_plans', 'billing_cycle', type_=sa.String(), postgresql_using='billing_cycle::varchar')
        op.alter_column('subscription_plans', 'stripe_price_id', type_=sa.String(), postgresql_using='stripe_price_id::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('subscription_plans', 'minimum_price_cents', nullable=False)
    print(" ✓ Fixed nullable constraint")

    print("\n✅ Schema alignment complete! PROD now matches DEV (source of truth)")


def downgrade() -> None:
    """Revert alignment changes (not recommended)"""
    print("⚠️ Downgrade not supported for alignment migration")
    print("   To revert, restore from backup")
alembic/versions/012_fix_remaining_differences.py (new file, 170 lines)
@@ -0,0 +1,170 @@
"""fix_remaining_differences
|
||||
|
||||
Revision ID: 012_fix_remaining
|
||||
Revises: 011_align_prod_dev
|
||||
Create Date: 2026-01-05
|
||||
|
||||
Fixes the last 5 schema differences found after migration 011:
|
||||
1-2. import_rollback_audit nullable constraints (PROD)
|
||||
3-4. role_permissions type and nullable (PROD)
|
||||
5. UserStatus enum values (DEV - remove deprecated values)
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import ENUM
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '012_fix_remaining'
|
||||
down_revision: Union[str, None] = '011_align_prod_dev'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Fix remaining schema differences"""
|
||||
from sqlalchemy import inspect
|
||||
|
||||
conn = op.get_bind()
|
||||
inspector = inspect(conn)
|
||||
|
||||
print("Fixing remaining schema differences...")
|
||||
|
||||
# ============================================================
|
||||
# 1. FIX IMPORT_ROLLBACK_AUDIT TABLE (PROD only)
|
||||
# ============================================================
|
||||
print("\n[1/3] Fixing import_rollback_audit nullable constraints...")
|
||||
|
||||
# Check if there are any NULL values first
|
||||
try:
|
||||
null_count = conn.execute(sa.text("""
|
||||
SELECT COUNT(*) FROM import_rollback_audit
|
||||
WHERE created_at IS NULL OR rolled_back_at IS NULL
|
||||
""")).scalar()
|
||||
|
||||
if null_count > 0:
|
||||
# Fill NULLs with current timestamp
|
||||
conn.execute(sa.text("""
|
||||
UPDATE import_rollback_audit
|
||||
SET created_at = NOW() WHERE created_at IS NULL
|
||||
"""))
|
||||
conn.execute(sa.text("""
|
||||
UPDATE import_rollback_audit
|
||||
SET rolled_back_at = NOW() WHERE rolled_back_at IS NULL
|
||||
"""))
|
||||
print(f" ✓ Filled {null_count} NULL timestamps")
|
||||
|
||||
# Now set NOT NULL
|
||||
op.alter_column('import_rollback_audit', 'created_at', nullable=False)
|
||||
op.alter_column('import_rollback_audit', 'rolled_back_at', nullable=False)
|
||||
print(" ✓ Set NOT NULL constraints")
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: {e}")
|
||||
|
||||
# ============================================================
|
||||
# 2. FIX ROLE_PERMISSIONS TABLE (PROD only)
|
||||
# ============================================================
|
||||
print("\n[2/3] Fixing role_permissions.role type and nullable...")
|
||||
|
||||
try:
|
||||
# Change VARCHAR(50) to VARCHAR(10) to match UserRole enum
|
||||
op.alter_column('role_permissions', 'role',
|
||||
type_=sa.String(10),
|
||||
postgresql_using='role::varchar(10)')
|
||||
print(" ✓ Changed VARCHAR(50) to VARCHAR(10)")
|
||||
|
||||
# Set NOT NULL
|
||||
op.alter_column('role_permissions', 'role', nullable=False)
|
||||
print(" ✓ Set NOT NULL constraint")
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: {e}")
|
||||
|
||||
# ============================================================
|
||||
# 3. FIX USERSTATUS ENUM (DEV only - remove deprecated values)
|
||||
# ============================================================
|
||||
print("\n[3/3] Fixing UserStatus enum values...")
|
||||
|
||||
try:
|
||||
# First, check if the enum has deprecated values
|
||||
enum_values = conn.execute(sa.text("""
|
||||
SELECT enumlabel
|
||||
FROM pg_enum
|
||||
WHERE enumtypid = (
|
||||
SELECT oid FROM pg_type WHERE typname = 'userstatus'
|
||||
)
|
||||
""")).fetchall()
|
||||
|
||||
enum_values_list = [row[0] for row in enum_values]
|
||||
has_deprecated = 'pending_approval' in enum_values_list or 'pre_approved' in enum_values_list
|
||||
|
||||
if not has_deprecated:
|
||||
print(" ✓ UserStatus enum already correct (no deprecated values)")
|
||||
else:
|
||||
print(" ⏳ Found deprecated enum values, migrating...")
|
||||
|
||||
# Check if any users have deprecated status values
|
||||
deprecated_count = conn.execute(sa.text("""
|
||||
SELECT COUNT(*) FROM users
|
||||
WHERE status IN ('pending_approval', 'pre_approved')
|
||||
""")).scalar()
|
||||
|
||||
if deprecated_count > 0:
|
||||
print(f" ⏳ Migrating {deprecated_count} users with deprecated status values...")
|
||||
|
||||
# Migrate deprecated values to new equivalents
|
||||
conn.execute(sa.text("""
|
||||
UPDATE users
|
||||
SET status = 'pre_validated'
|
||||
WHERE status = 'pre_approved'
|
||||
"""))
|
||||
|
||||
conn.execute(sa.text("""
|
||||
UPDATE users
|
||||
SET status = 'payment_pending'
|
||||
WHERE status = 'pending_approval'
|
||||
"""))
|
||||
|
||||
print(" ✓ Migrated deprecated status values")
|
||||
else:
|
||||
print(" ✓ No users with deprecated status values")
|
||||
|
||||
# Now remove deprecated enum values
|
||||
# PostgreSQL doesn't support removing enum values directly,
|
||||
# so we need to recreate the enum
|
||||
conn.execute(sa.text("""
|
||||
-- Create new enum with correct values (matches models.py)
|
||||
CREATE TYPE userstatus_new AS ENUM (
|
||||
'pending_email',
|
||||
'pending_validation',
|
||||
'pre_validated',
|
||||
'payment_pending',
|
||||
'active',
|
||||
'inactive',
|
||||
'canceled',
|
||||
'expired',
|
||||
'rejected',
|
||||
'abandoned'
|
||||
);
|
||||
|
||||
-- Update column to use new enum
|
||||
ALTER TABLE users
|
||||
ALTER COLUMN status TYPE userstatus_new
|
||||
USING status::text::userstatus_new;
|
||||
|
||||
-- Drop old enum and rename new one
|
||||
DROP TYPE userstatus;
|
||||
ALTER TYPE userstatus_new RENAME TO userstatus;
|
||||
"""))
|
||||
|
||||
print(" ✓ Updated UserStatus enum (removed deprecated values)")
|
||||
|
||||
except Exception as e:
|
||||
print(f" ⚠️ Warning: Enum update failed (may already be correct): {e}")
|
||||
|
||||
print("\n✅ All remaining differences fixed!")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Revert fixes (not recommended)"""
|
||||
print("⚠️ Downgrade not supported")
|
||||
pass
|
alembic/versions/013_sync_role_permissions.py (new file, 147 lines)
@@ -0,0 +1,147 @@
"""sync_role_permissions
|
||||
|
||||
Revision ID: 013_sync_permissions
|
||||
Revises: 012_fix_remaining
|
||||
Create Date: 2026-01-05
|
||||
|
||||
Syncs role_permissions between DEV and PROD bidirectionally.
|
||||
- Adds 18 DEV-only permissions to PROD (new features)
|
||||
- Adds 6 PROD-only permissions to DEV (operational/security)
|
||||
Result: Both environments have identical 142 permission mappings
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '013_sync_permissions'
|
||||
down_revision: Union[str, None] = '012_fix_remaining'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Sync role_permissions bidirectionally"""
|
||||
from sqlalchemy import text
|
||||
|
||||
conn = op.get_bind()
|
||||
|
||||
print("Syncing role_permissions between environments...")
|
||||
|
||||
# ============================================================
|
||||
# STEP 1: Add missing permissions to ensure all exist
|
||||
# ============================================================
|
||||
print("\n[1/2] Ensuring all permissions exist...")
|
||||
|
||||
# Permissions that should exist (union of both environments)
|
||||
all_permissions = [
|
||||
# From DEV-only list
|
||||
('donations.export', 'Export Donations', 'donations'),
|
||||
('donations.view', 'View Donations', 'donations'),
|
||||
('financials.create', 'Create Financial Reports', 'financials'),
|
||||
('financials.delete', 'Delete Financial Reports', 'financials'),
|
||||
('financials.edit', 'Edit Financial Reports', 'financials'),
|
||||
('financials.export', 'Export Financial Reports', 'financials'),
|
||||
('financials.payments', 'Manage Financial Payments', 'financials'),
|
||||
('settings.edit', 'Edit Settings', 'settings'),
|
||||
('settings.email_templates', 'Manage Email Templates', 'settings'),
|
||||
('subscriptions.activate', 'Activate Subscriptions', 'subscriptions'),
|
||||
('subscriptions.cancel', 'Cancel Subscriptions', 'subscriptions'),
|
||||
('subscriptions.create', 'Create Subscriptions', 'subscriptions'),
|
||||
('subscriptions.edit', 'Edit Subscriptions', 'subscriptions'),
|
||||
('subscriptions.export', 'Export Subscriptions', 'subscriptions'),
|
||||
('subscriptions.plans', 'Manage Subscription Plans', 'subscriptions'),
|
||||
('subscriptions.view', 'View Subscriptions', 'subscriptions'),
|
||||
('events.calendar_export', 'Export Event Calendar', 'events'),
|
||||
('events.rsvps', 'View Event RSVPs', 'events'),
|
||||
# From PROD-only list
|
||||
('permissions.audit', 'Audit Permissions', 'permissions'),
|
||||
('permissions.view', 'View Permissions', 'permissions'),
|
||||
('settings.backup', 'Manage Backups', 'settings'),
|
||||
]
|
||||
|
||||
for code, name, module in all_permissions:
|
||||
# Insert if not exists
|
||||
conn.execute(text(f"""
|
||||
INSERT INTO permissions (id, code, name, description, module, created_at)
|
||||
SELECT
|
||||
gen_random_uuid(),
|
||||
'{code}',
|
||||
'{name}',
|
||||
'{name}',
|
||||
'{module}',
|
||||
NOW()
|
||||
WHERE NOT EXISTS (
|
||||
SELECT 1 FROM permissions WHERE code = '{code}'
|
||||
)
|
||||
"""))
|
||||
|
||||
print(" ✓ Ensured all permissions exist")
|
||||
|
||||
# ============================================================
|
||||
# STEP 2: Add missing role-permission mappings
|
||||
# ============================================================
|
||||
print("\n[2/2] Adding missing role-permission mappings...")
|
||||
|
||||
# Mappings that should exist (union of both environments)
|
||||
role_permission_mappings = [
|
||||
# DEV-only (add to PROD)
|
||||
('admin', 'donations.export'),
|
||||
('admin', 'donations.view'),
|
||||
('admin', 'financials.create'),
|
||||
('admin', 'financials.delete'),
|
||||
('admin', 'financials.edit'),
|
||||
('admin', 'financials.export'),
|
||||
('admin', 'financials.payments'),
|
||||
('admin', 'settings.edit'),
|
||||
('admin', 'settings.email_templates'),
|
||||
('admin', 'subscriptions.activate'),
|
||||
('admin', 'subscriptions.cancel'),
|
||||
('admin', 'subscriptions.create'),
|
||||
('admin', 'subscriptions.edit'),
|
||||
('admin', 'subscriptions.export'),
|
||||
('admin', 'subscriptions.plans'),
|
||||
('admin', 'subscriptions.view'),
|
||||
('member', 'events.calendar_export'),
|
||||
('member', 'events.rsvps'),
|
||||
# PROD-only (add to DEV)
|
||||
('admin', 'permissions.audit'),
|
||||
('admin', 'permissions.view'),
|
||||
('admin', 'settings.backup'),
|
||||
('finance', 'bylaws.view'),
|
||||
('finance', 'events.view'),
|
||||
('finance', 'newsletters.view'),
|
||||
]
|
||||
|
||||
added_count = 0
|
||||
for role, perm_code in role_permission_mappings:
|
||||
result = conn.execute(text(f"""
|
||||
INSERT INTO role_permissions (id, role, permission_id, created_at)
|
||||
SELECT
|
||||
gen_random_uuid(),
|
||||
'{role}',
|
||||
p.id,
|
||||
NOW()
|
||||
FROM permissions p
|
||||
WHERE p.code = '{perm_code}'
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM role_permissions rp
|
||||
WHERE rp.role = '{role}'
|
||||
AND rp.permission_id = p.id
|
||||
)
|
||||
RETURNING id
|
||||
"""))
|
||||
if result.rowcount > 0:
|
||||
added_count += 1
|
||||
|
||||
print(f" ✓ Added {added_count} missing role-permission mappings")
|
||||
|
||||
# Verify final count
|
||||
final_count = conn.execute(text("SELECT COUNT(*) FROM role_permissions")).scalar()
|
||||
print(f"\n✅ Role-permission mappings synchronized: {final_count} total")
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Revert sync (not recommended)"""
|
||||
print("⚠️ Downgrade not supported - permissions are additive")
|
||||
pass
|
alembic/versions/4fa11836f7fd_add_role_audit_fields.py (new file, 48 lines)
@@ -0,0 +1,48 @@
"""add_role_audit_fields
|
||||
|
||||
Revision ID: 4fa11836f7fd
|
||||
Revises: 013_sync_permissions
|
||||
Create Date: 2026-01-16 17:21:40.514605
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '4fa11836f7fd'
|
||||
down_revision: Union[str, None] = '013_sync_permissions'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# Add role audit trail columns
|
||||
op.add_column('users', sa.Column('role_changed_at', sa.DateTime(timezone=True), nullable=True))
|
||||
op.add_column('users', sa.Column('role_changed_by', UUID(as_uuid=True), nullable=True))
|
||||
|
||||
# Create foreign key constraint to track who changed the role
|
||||
op.create_foreign_key(
|
||||
'fk_users_role_changed_by',
|
||||
'users', 'users',
|
||||
['role_changed_by'], ['id'],
|
||||
ondelete='SET NULL'
|
||||
)
|
||||
|
||||
# Create index for efficient querying by role change date
|
||||
op.create_index('idx_users_role_changed_at', 'users', ['role_changed_at'])
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# Drop index first
|
||||
op.drop_index('idx_users_role_changed_at')
|
||||
|
||||
# Drop foreign key constraint
|
||||
op.drop_constraint('fk_users_role_changed_by', 'users', type_='foreignkey')
|
||||
|
||||
# Drop columns
|
||||
op.drop_column('users', 'role_changed_by')
|
||||
op.drop_column('users', 'role_changed_at')
|
alembic/versions/ec4cb4a49cde_add_system_settings_table.py (new file, 68 lines)
@@ -0,0 +1,68 @@
"""add_system_settings_table
|
||||
|
||||
Revision ID: ec4cb4a49cde
|
||||
Revises: 4fa11836f7fd
|
||||
Create Date: 2026-01-16 18:16:00.283455
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'ec4cb4a49cde'
|
||||
down_revision: Union[str, None] = '4fa11836f7fd'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# Create enum for setting types (only if not exists)
|
||||
op.execute("""
|
||||
DO $$ BEGIN
|
||||
CREATE TYPE settingtype AS ENUM ('plaintext', 'encrypted', 'json');
|
||||
EXCEPTION
|
||||
WHEN duplicate_object THEN null;
|
||||
END $$;
|
||||
""")
|
||||
|
||||
# Create system_settings table
|
||||
op.execute("""
|
||||
CREATE TABLE system_settings (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
setting_key VARCHAR(100) UNIQUE NOT NULL,
|
||||
setting_value TEXT,
|
||||
setting_type settingtype NOT NULL DEFAULT 'plaintext'::settingtype,
|
||||
description TEXT,
|
||||
updated_by UUID REFERENCES users(id) ON DELETE SET NULL,
|
||||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
is_sensitive BOOLEAN NOT NULL DEFAULT FALSE
|
||||
);
|
||||
|
||||
COMMENT ON COLUMN system_settings.setting_key IS 'Unique setting identifier (e.g., stripe_secret_key)';
|
||||
COMMENT ON COLUMN system_settings.setting_value IS 'Setting value (encrypted if setting_type is encrypted)';
|
||||
COMMENT ON COLUMN system_settings.setting_type IS 'Type of setting: plaintext, encrypted, or json';
|
||||
COMMENT ON COLUMN system_settings.description IS 'Human-readable description of the setting';
|
||||
COMMENT ON COLUMN system_settings.updated_by IS 'User who last updated this setting';
|
||||
COMMENT ON COLUMN system_settings.is_sensitive IS 'Whether this setting contains sensitive data';
|
||||
""")
|
||||
|
||||
# Create indexes
|
||||
op.create_index('idx_system_settings_key', 'system_settings', ['setting_key'])
|
||||
op.create_index('idx_system_settings_updated_at', 'system_settings', ['updated_at'])
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# Drop indexes
|
||||
op.drop_index('idx_system_settings_updated_at')
|
||||
op.drop_index('idx_system_settings_key')
|
||||
|
||||
# Drop table
|
||||
op.drop_table('system_settings')
|
||||
|
||||
# Drop enum
|
||||
op.execute('DROP TYPE IF EXISTS settingtype')
check_all_columns.sql (new file, 92 lines)
@@ -0,0 +1,92 @@
-- Comprehensive check for all missing columns
-- Run: psql -h 10.9.23.11 -p 54321 -U postgres -d loaf_new -f check_all_columns.sql

\echo '================================================================'
\echo 'COMPREHENSIVE COLUMN CHECK FOR ALL TABLES'
\echo '================================================================'

-- ============================================================
-- 1. USERS TABLE
-- ============================================================
\echo ''
\echo '1. USERS TABLE - Expected: 60+ columns'
\echo 'Checking for specific columns:'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'newsletter_publish_name') THEN '✓' ELSE '✗' END || ' newsletter_publish_name',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'volunteer_interests') THEN '✓' ELSE '✗' END || ' volunteer_interests',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'scholarship_requested') THEN '✓' ELSE '✗' END || ' scholarship_requested',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'show_in_directory') THEN '✓' ELSE '✗' END || ' show_in_directory',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'password_reset_token') THEN '✓' ELSE '✗' END || ' password_reset_token',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'accepts_tos') THEN '✓' ELSE '✗' END || ' accepts_tos',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'member_since') THEN '✓' ELSE '✗' END || ' member_since',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'rejection_reason') THEN '✓' ELSE '✗' END || ' rejection_reason',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'import_source') THEN '✓' ELSE '✗' END || ' import_source'
\gx

-- ============================================================
-- 2. EVENTS TABLE
-- ============================================================
\echo ''
\echo '2. EVENTS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'events' AND column_name = 'calendar_uid') THEN '✓' ELSE '✗' END || ' calendar_uid';

-- ============================================================
-- 3. SUBSCRIPTIONS TABLE
-- ============================================================
\echo ''
\echo '3. SUBSCRIPTIONS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'subscriptions' AND column_name = 'base_subscription_cents') THEN '✓' ELSE '✗' END || ' base_subscription_cents',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'subscriptions' AND column_name = 'donation_cents') THEN '✓' ELSE '✗' END || ' donation_cents',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'subscriptions' AND column_name = 'manual_payment') THEN '✓' ELSE '✗' END || ' manual_payment'
\gx

-- ============================================================
-- 4. IMPORT_JOBS TABLE
-- ============================================================
\echo ''
\echo '4. IMPORT_JOBS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'field_mapping') THEN '✓' ELSE '✗' END || ' field_mapping',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'wordpress_metadata') THEN '✓' ELSE '✗' END || ' wordpress_metadata',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'imported_user_ids') THEN '✓' ELSE '✗' END || ' imported_user_ids',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'rollback_at') THEN '✓' ELSE '✗' END || ' rollback_at',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'rollback_by') THEN '✓' ELSE '✗' END || ' rollback_by'
\gx

-- ============================================================
-- 5. CHECK IF IMPORT_ROLLBACK_AUDIT TABLE EXISTS
-- ============================================================
\echo ''
\echo '5. IMPORT_ROLLBACK_AUDIT TABLE - Should exist'
SELECT CASE
    WHEN EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'import_rollback_audit')
    THEN '✓ Table exists'
    ELSE '✗ TABLE MISSING - Need to create it'
END AS status;

-- ============================================================
-- SUMMARY: Count existing columns in each table
-- ============================================================
\echo ''
\echo '================================================================'
\echo 'SUMMARY: Column counts per table'
\echo '================================================================'

SELECT
    table_name,
    COUNT(*) AS column_count
FROM information_schema.columns
WHERE table_name IN (
    'users', 'events', 'event_rsvps', 'subscription_plans', 'subscriptions',
    'donations', 'event_galleries', 'newsletter_archives', 'financial_reports',
    'bylaws_documents', 'storage_usage', 'permissions', 'roles', 'role_permissions',
    'user_invitations', 'import_jobs', 'import_rollback_audit'
)
GROUP BY table_name
ORDER BY table_name;
check_db_integrity.py (new file, 345 lines)
@@ -0,0 +1,345 @@
#!/usr/bin/env python3
"""
Database Integrity Checker
Compares schema and data integrity between development and production databases
"""

import sys
from sqlalchemy import create_engine, inspect, text
from sqlalchemy.engine import reflection
import json
from collections import defaultdict

# Database URLs
DEV_DB = "postgresql://postgres:RchhcpaUKZuZuMOvB5kwCP1weLBnAG6tNMXE5FHdk8AwCvolBMALYFVYRM7WCl9x@10.9.23.11:5001/membership_demo"
PROD_DB = "postgresql://postgres:fDv3fRvMgfPueDWDUxj27NJVaynsewIdh6b2Hb28tcvG3Ew6mhscASg2kulx4tr7@10.9.23.11:54321/loaf_new"


def get_db_info(engine, label):
    """Get comprehensive database information"""
    inspector = inspect(engine)

    info = {
        'label': label,
        'tables': {},
        'indexes': {},
        'foreign_keys': {},
        'sequences': [],
        'enums': []
    }

    # Get all table names
    table_names = inspector.get_table_names()

    for table_name in table_names:
        # Get columns
        columns = inspector.get_columns(table_name)
        info['tables'][table_name] = {
            'columns': {
                col['name']: {
                    'type': str(col['type']),
                    'nullable': col['nullable'],
                    'default': str(col.get('default', None)),
                    'autoincrement': col.get('autoincrement', False)
                }
                for col in columns
            },
            'column_count': len(columns)
        }

        # Get primary keys
        pk = inspector.get_pk_constraint(table_name)
        info['tables'][table_name]['primary_key'] = pk.get('constrained_columns', [])

        # Get indexes
        indexes = inspector.get_indexes(table_name)
        info['indexes'][table_name] = [
            {
                'name': idx['name'],
                'columns': idx['column_names'],
                'unique': idx['unique']
            }
            for idx in indexes
        ]

        # Get foreign keys
        fks = inspector.get_foreign_keys(table_name)
        info['foreign_keys'][table_name] = [
            {
                'name': fk.get('name'),
                'columns': fk['constrained_columns'],
                'referred_table': fk['referred_table'],
                'referred_columns': fk['referred_columns']
            }
            for fk in fks
        ]

    # Get sequences
    with engine.connect() as conn:
        result = conn.execute(text("""
            SELECT sequence_name
            FROM information_schema.sequences
            WHERE sequence_schema = 'public'
        """))
        info['sequences'] = [row[0] for row in result]

        # Get enum types
        result = conn.execute(text("""
            SELECT t.typname as enum_name,
                   array_agg(e.enumlabel ORDER BY e.enumsortorder) as enum_values
            FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            WHERE t.typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'public')
            GROUP BY t.typname
        """))
        info['enums'] = {row[0]: row[1] for row in result}

    return info


def compare_tables(dev_info, prod_info):
    """Compare tables between databases"""
    dev_tables = set(dev_info['tables'].keys())
    prod_tables = set(prod_info['tables'].keys())

    print("\n" + "="*80)
    print("TABLE COMPARISON")
    print("="*80)

    # Tables only in dev
    dev_only = dev_tables - prod_tables
    if dev_only:
        print(f"\n❌ Tables only in DEV ({len(dev_only)}):")
        for table in sorted(dev_only):
            print(f" - {table}")

    # Tables only in prod
    prod_only = prod_tables - dev_tables
    if prod_only:
        print(f"\n❌ Tables only in PROD ({len(prod_only)}):")
        for table in sorted(prod_only):
            print(f" - {table}")

    # Common tables
    common = dev_tables & prod_tables
    print(f"\n✅ Common tables: {len(common)}")

    return common


def compare_columns(dev_info, prod_info, common_tables):
    """Compare columns for common tables"""
    print("\n" + "="*80)
    print("COLUMN COMPARISON")
    print("="*80)

    issues = []

    for table in sorted(common_tables):
        dev_cols = set(dev_info['tables'][table]['columns'].keys())
        prod_cols = set(prod_info['tables'][table]['columns'].keys())

        dev_only = dev_cols - prod_cols
        prod_only = prod_cols - dev_cols

        if dev_only or prod_only:
            print(f"\n⚠️ Table '{table}' has column differences:")

            if dev_only:
                print(f" Columns only in DEV: {', '.join(sorted(dev_only))}")
                issues.append(f"{table}: DEV-only columns: {', '.join(dev_only)}")

            if prod_only:
                print(f" Columns only in PROD: {', '.join(sorted(prod_only))}")
                issues.append(f"{table}: PROD-only columns: {', '.join(prod_only)}")

        # Compare column types for common columns
        common_cols = dev_cols & prod_cols
        for col in common_cols:
            dev_col = dev_info['tables'][table]['columns'][col]
            prod_col = prod_info['tables'][table]['columns'][col]

            if dev_col['type'] != prod_col['type']:
                print(f" ⚠️ Column '{col}' type mismatch:")
                print(f"   DEV:  {dev_col['type']}")
                print(f"   PROD: {prod_col['type']}")
                issues.append(f"{table}.{col}: Type mismatch")

            if dev_col['nullable'] != prod_col['nullable']:
                print(f" ⚠️ Column '{col}' nullable mismatch:")
                print(f"   DEV:  {dev_col['nullable']}")
                print(f"   PROD: {prod_col['nullable']}")
                issues.append(f"{table}.{col}: Nullable mismatch")

    if not issues:
        print("\n✅ All columns match between DEV and PROD")

    return issues


def compare_enums(dev_info, prod_info):
    """Compare enum types"""
    print("\n" + "="*80)
    print("ENUM TYPE COMPARISON")
    print("="*80)

    dev_enums = set(dev_info['enums'].keys())
    prod_enums = set(prod_info['enums'].keys())

    dev_only = dev_enums - prod_enums
    prod_only = prod_enums - dev_enums

    issues = []

    if dev_only:
        print(f"\n❌ Enums only in DEV: {', '.join(sorted(dev_only))}")
        issues.extend([f"Enum '{e}' only in DEV" for e in dev_only])

    if prod_only:
        print(f"\n❌ Enums only in PROD: {', '.join(sorted(prod_only))}")
        issues.extend([f"Enum '{e}' only in PROD" for e in prod_only])

    # Compare enum values for common enums
    common = dev_enums & prod_enums
    for enum_name in sorted(common):
        dev_values = set(dev_info['enums'][enum_name])
        prod_values = set(prod_info['enums'][enum_name])

        if dev_values != prod_values:
            print(f"\n⚠️ Enum '{enum_name}' values differ:")
            print(f"  DEV:  {', '.join(sorted(dev_values))}")
            print(f"  PROD: {', '.join(sorted(prod_values))}")
            issues.append(f"Enum '{enum_name}' values differ")

    if not issues:
        print("\n✅ All enum types match")

    return issues


def check_migration_history(dev_engine, prod_engine):
    """Check Alembic migration history"""
    print("\n" + "="*80)
    print("MIGRATION HISTORY")
    print("="*80)

    try:
        with dev_engine.connect() as dev_conn:
            dev_result = dev_conn.execute(text("SELECT version_num FROM alembic_version"))
            dev_version = dev_result.fetchone()
            dev_version = dev_version[0] if dev_version else None

        with prod_engine.connect() as prod_conn:
            prod_result = prod_conn.execute(text("SELECT version_num FROM alembic_version"))
            prod_version = prod_result.fetchone()
            prod_version = prod_version[0] if prod_version else None

        print(f"\nDEV migration version: {dev_version}")
        print(f"PROD migration version: {prod_version}")

        if dev_version == prod_version:
            print("✅ Migration versions match")
            return []
        else:
            print("❌ Migration versions DO NOT match")
            return ["Migration versions differ"]

    except Exception as e:
        print(f"⚠️ Could not check migration history: {str(e)}")
        return [f"Migration check failed: {str(e)}"]


def get_row_counts(engine, tables):
    """Get row counts for all tables"""
    counts = {}
    with engine.connect() as conn:
        for table in tables:
            result = conn.execute(text(f"SELECT COUNT(*) FROM {table}"))
            counts[table] = result.fetchone()[0]
    return counts


def compare_data_counts(dev_engine, prod_engine, common_tables):
    """Compare row counts between databases"""
    print("\n" + "="*80)
    print("DATA ROW COUNTS")
    print("="*80)

    print("\nGetting DEV row counts...")
    dev_counts = get_row_counts(dev_engine, common_tables)

    print("Getting PROD row counts...")
    prod_counts = get_row_counts(prod_engine, common_tables)

    print(f"\n{'Table':<30} {'DEV':<15} {'PROD':<15} {'Diff':<15}")
    print("-" * 75)

    for table in sorted(common_tables):
        dev_count = dev_counts[table]
        prod_count = prod_counts[table]
        diff = dev_count - prod_count
        diff_str = f"+{diff}" if diff > 0 else str(diff)

        status = "⚠️ " if abs(diff) > 0 else "✅"
        print(f"{status} {table:<28} {dev_count:<15} {prod_count:<15} {diff_str:<15}")


def main():
    print("\n" + "="*80)
    print("DATABASE INTEGRITY CHECKER")
    print("="*80)
    print(f"\nDEV: {DEV_DB.split('@')[1]}")  # Hide password
    print(f"PROD: {PROD_DB.split('@')[1]}")

    try:
        # Connect to databases
        print("\n🔌 Connecting to databases...")
        dev_engine = create_engine(DEV_DB)
        prod_engine = create_engine(PROD_DB)

        # Test connections
        with dev_engine.connect() as conn:
            conn.execute(text("SELECT 1"))
            print("✅ Connected to DEV database")

        with prod_engine.connect() as conn:
            conn.execute(text("SELECT 1"))
            print("✅ Connected to PROD database")

        # Get database info
        print("\n📊 Gathering database information...")
        dev_info = get_db_info(dev_engine, "DEV")
        prod_info = get_db_info(prod_engine, "PROD")

        # Run comparisons
        all_issues = []

        common_tables = compare_tables(dev_info, prod_info)

        column_issues = compare_columns(dev_info, prod_info, common_tables)
        all_issues.extend(column_issues)

        enum_issues = compare_enums(dev_info, prod_info)
        all_issues.extend(enum_issues)

        migration_issues = check_migration_history(dev_engine, prod_engine)
        all_issues.extend(migration_issues)

        compare_data_counts(dev_engine, prod_engine, common_tables)

        # Summary
        print("\n" + "="*80)
        print("SUMMARY")
        print("="*80)

        if all_issues:
            print(f"\n❌ Found {len(all_issues)} integrity issues:")
            for i, issue in enumerate(all_issues, 1):
                print(f" {i}. {issue}")
            print("\n⚠️ Databases are NOT in sync!")
            sys.exit(1)
        else:
            print("\n✅ Databases are in sync!")
            print("✅ No integrity issues found")
            sys.exit(0)

    except Exception as e:
        print(f"\n❌ Error: {str(e)}")
        import traceback
        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    main()
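Run it ad hoc with python check_db_integrity.py; the script exits 0 when the schemas match and 1 when differences (or a connection failure) are found, so it can gate a deploy step.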
database.py (17 lines changed)
@@ -1,6 +1,7 @@
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import QueuePool
import os
from dotenv import load_dotenv
from pathlib import Path
@@ -10,7 +11,21 @@ load_dotenv(ROOT_DIR / '.env')

DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://user:password@localhost:5432/membership_db')

engine = create_engine(DATABASE_URL)
# Configure engine with connection pooling and connection health checks
engine = create_engine(
    DATABASE_URL,
    poolclass=QueuePool,
    pool_size=5,           # Keep 5 connections open
    max_overflow=10,       # Allow up to 10 extra connections during peak
    pool_pre_ping=True,    # CRITICAL: Test connections before using them
    pool_recycle=3600,     # Recycle connections every hour (prevents stale connections)
    echo=False,            # Set to True for SQL debugging
    connect_args={
        'connect_timeout': 10,                   # Timeout connection attempts after 10 seconds
        'options': '-c statement_timeout=30000'  # 30 second query timeout
    }
)

SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

Base = declarative_base()
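For reference, a sketch of how the pooled engine above is typically consumed; db_session here is illustrative, not a helper defined in this repo:

# Illustrative only: a context-managed session on top of SessionLocal.
from contextlib import contextmanager

from database import SessionLocal


@contextmanager
def db_session():
    """Yield a session from the pool; commit on success, roll back on error."""
    db = SessionLocal()
    try:
        yield db
        db.commit()
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()  # returns the connection to the QueuePool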
docker-compose.yml (new file, 14 lines)
@@ -0,0 +1,14 @@
services:
  backend:
    build:
      context: .
      dockerfile: Dockerfile  # Use Dockerfile.prod for production
    ports:
      - "8000:8000"
    env_file:
      - .env
    environment:
      DATABASE_URL: ${DATABASE_URL}
    volumes:
      - .:/app  # sync code for hot reload
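With this file in place, docker compose up --build builds the image and starts the backend on port 8000; the .:/app bind mount is what lets the server's hot reload pick up local edits.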
encryption_service.py (new file, 122 lines)
@@ -0,0 +1,122 @@
"""
|
||||
Encryption service for sensitive settings stored in database.
|
||||
|
||||
Uses Fernet symmetric encryption (AES-128 in CBC mode with HMAC authentication).
|
||||
The encryption key is derived from a master secret stored in .env.
|
||||
"""
|
||||
|
||||
import os
|
||||
import base64
|
||||
from cryptography.fernet import Fernet
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
|
||||
|
||||
class EncryptionService:
|
||||
"""Service for encrypting and decrypting sensitive configuration values"""
|
||||
|
||||
def __init__(self):
|
||||
# Get master encryption key from environment
|
||||
# This should be a long, random string (e.g., 64 characters)
|
||||
# Generate one with: python -c "import secrets; print(secrets.token_urlsafe(64))"
|
||||
self.master_secret = os.environ.get('SETTINGS_ENCRYPTION_KEY')
|
||||
|
||||
if not self.master_secret:
|
||||
raise ValueError(
|
||||
"SETTINGS_ENCRYPTION_KEY environment variable not set. "
|
||||
"Generate one with: python -c \"import secrets; print(secrets.token_urlsafe(64))\""
|
||||
)
|
||||
|
||||
# Derive encryption key from master secret using PBKDF2HMAC
|
||||
# This adds an extra layer of security
|
||||
kdf = PBKDF2HMAC(
|
||||
algorithm=hashes.SHA256(),
|
||||
length=32,
|
||||
salt=b'systemsettings', # Fixed salt (OK for key derivation from strong secret)
|
||||
iterations=100000,
|
||||
backend=default_backend()
|
||||
)
|
||||
key = base64.urlsafe_b64encode(kdf.derive(self.master_secret.encode()))
|
||||
self.cipher = Fernet(key)
|
||||
|
||||
def encrypt(self, plaintext: str) -> str:
|
||||
"""
|
||||
Encrypt a plaintext string.
|
||||
|
||||
Args:
|
||||
plaintext: The string to encrypt
|
||||
|
||||
Returns:
|
||||
Base64-encoded encrypted string
|
||||
"""
|
||||
if not plaintext:
|
||||
return ""
|
||||
|
||||
encrypted_bytes = self.cipher.encrypt(plaintext.encode())
|
||||
return encrypted_bytes.decode('utf-8')
|
||||
|
||||
def decrypt(self, encrypted: str) -> str:
|
||||
"""
|
||||
Decrypt an encrypted string.
|
||||
|
||||
Args:
|
||||
encrypted: The base64-encoded encrypted string
|
||||
|
||||
Returns:
|
||||
Decrypted plaintext string
|
||||
|
||||
Raises:
|
||||
cryptography.fernet.InvalidToken: If decryption fails (wrong key or corrupted data)
|
||||
"""
|
||||
if not encrypted:
|
||||
return ""
|
||||
|
||||
decrypted_bytes = self.cipher.decrypt(encrypted.encode())
|
||||
return decrypted_bytes.decode('utf-8')
|
||||
|
||||
def is_encrypted(self, value: str) -> bool:
|
||||
"""
|
||||
Check if a value appears to be encrypted (starts with Fernet token format).
|
||||
|
||||
This is a heuristic check - not 100% reliable but useful for validation.
|
||||
|
||||
Args:
|
||||
value: String to check
|
||||
|
||||
Returns:
|
||||
True if value looks like a Fernet token
|
||||
"""
|
||||
if not value:
|
||||
return False
|
||||
|
||||
# Fernet tokens are base64-encoded and start with version byte (gAAAAA...)
|
||||
# They're always > 60 characters
|
||||
try:
|
||||
return len(value) > 60 and value.startswith('gAAAAA')
|
||||
except:
|
||||
return False
|
||||
|
||||
|
||||
# Global encryption service instance
|
||||
# Initialize on module import so it fails fast if encryption key is missing
|
||||
try:
|
||||
encryption_service = EncryptionService()
|
||||
except ValueError as e:
|
||||
print(f"WARNING: {e}")
|
||||
print("Encryption service will not be available.")
|
||||
encryption_service = None
|
||||
|
||||
|
||||
def get_encryption_service() -> EncryptionService:
|
||||
"""
|
||||
Get the global encryption service instance.
|
||||
|
||||
Raises:
|
||||
ValueError: If encryption service is not initialized (missing SETTINGS_ENCRYPTION_KEY)
|
||||
"""
|
||||
if encryption_service is None:
|
||||
raise ValueError(
|
||||
"Encryption service not initialized. Set SETTINGS_ENCRYPTION_KEY environment variable."
|
||||
)
|
||||
return encryption_service
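A quick round trip through the service above; a minimal sketch, assuming SETTINGS_ENCRYPTION_KEY is exported before the module is imported:

# Sketch: encrypt/decrypt round trip with the module-level service.
from encryption_service import get_encryption_service

svc = get_encryption_service()
token = svc.encrypt("sk_test_example")  # Fernet token, starts with 'gAAAAA'
assert svc.is_encrypted(token)
assert svc.decrypt(token) == "sk_test_example"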
migrations/005_complete_permissions.sql (new file, 216 lines)
@@ -0,0 +1,216 @@
-- ============================================================================
-- Complete Permission Set (60 permissions from development)
-- Run this to sync production with development permissions
-- ============================================================================

BEGIN;

-- Delete old permissions and mappings
DELETE FROM role_permissions;
DELETE FROM permissions;

-- ============================================================================
-- Create ALL 60 permissions (matching development)
-- ============================================================================

INSERT INTO permissions (id, code, name, description, module, created_at)
VALUES
    -- Users Permissions (11)
    (gen_random_uuid(), 'users.view', 'View Users', 'View user list and profiles', 'users', NOW()),
    (gen_random_uuid(), 'users.create', 'Create Users', 'Create new users', 'users', NOW()),
    (gen_random_uuid(), 'users.edit', 'Edit Users', 'Edit user information', 'users', NOW()),
    (gen_random_uuid(), 'users.delete', 'Delete Users', 'Delete users', 'users', NOW()),
    (gen_random_uuid(), 'users.status', 'Change User Status', 'Update user status', 'users', NOW()),
    (gen_random_uuid(), 'users.approve', 'Approve Users', 'Approve pending memberships', 'users', NOW()),
    (gen_random_uuid(), 'users.export', 'Export Users', 'Export users to CSV', 'users', NOW()),
    (gen_random_uuid(), 'users.import', 'Import Users', 'Import users from CSV', 'users', NOW()),
    (gen_random_uuid(), 'users.reset_password', 'Reset User Password', 'Reset user passwords', 'users', NOW()),
    (gen_random_uuid(), 'users.resend_verification', 'Resend Verification', 'Resend email verification', 'users', NOW()),
    (gen_random_uuid(), 'users.invite', 'Invite Users', 'Send user invitations', 'users', NOW()),

    -- Events Permissions (8)
    (gen_random_uuid(), 'events.view', 'View Events', 'View event list', 'events', NOW()),
    (gen_random_uuid(), 'events.create', 'Create Events', 'Create new events', 'events', NOW()),
    (gen_random_uuid(), 'events.edit', 'Edit Events', 'Edit event information', 'events', NOW()),
    (gen_random_uuid(), 'events.delete', 'Delete Events', 'Delete events', 'events', NOW()),
    (gen_random_uuid(), 'events.publish', 'Publish Events', 'Publish/unpublish events', 'events', NOW()),
    (gen_random_uuid(), 'events.attendance', 'Manage Attendance', 'Mark attendance', 'events', NOW()),
    (gen_random_uuid(), 'events.rsvps', 'View RSVPs', 'View event RSVPs', 'events', NOW()),
    (gen_random_uuid(), 'events.calendar_export', 'Export Calendar', 'Export events to calendar', 'events', NOW()),

    -- Subscriptions Permissions (7)
    (gen_random_uuid(), 'subscriptions.view', 'View Subscriptions', 'View user subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.create', 'Create Subscriptions', 'Create new subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.edit', 'Edit Subscriptions', 'Edit user subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.cancel', 'Cancel Subscriptions', 'Cancel subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.activate', 'Activate Subscriptions', 'Manually activate subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.plans', 'Manage Plans', 'Manage subscription plans', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.export', 'Export Subscriptions', 'Export subscription data', 'subscriptions', NOW()),

    -- Donations Permissions (2)
    (gen_random_uuid(), 'donations.view', 'View Donations', 'View donation records', 'donations', NOW()),
    (gen_random_uuid(), 'donations.export', 'Export Donations', 'Export donation data', 'donations', NOW()),

    -- Financials Permissions (6)
    (gen_random_uuid(), 'financials.view', 'View Financial Reports', 'View financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.create', 'Create Financial Reports', 'Upload financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.edit', 'Edit Financial Reports', 'Edit financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.delete', 'Delete Financial Reports', 'Delete financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.export', 'Export Financial Data', 'Export financial data', 'financials', NOW()),
    (gen_random_uuid(), 'financials.payments', 'Manage Payments', 'Process manual payments', 'financials', NOW()),

    -- Newsletters Permissions (6)
    (gen_random_uuid(), 'newsletters.view', 'View Newsletters', 'View newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.create', 'Create Newsletters', 'Upload newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.edit', 'Edit Newsletters', 'Edit newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.delete', 'Delete Newsletters', 'Delete newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.send', 'Send Newsletters', 'Send newsletters to subscribers', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.subscribers', 'Manage Subscribers', 'Manage newsletter subscribers', 'newsletters', NOW()),

    -- Bylaws Permissions (5)
    (gen_random_uuid(), 'bylaws.view', 'View Bylaws', 'View bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.create', 'Create Bylaws', 'Upload bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.edit', 'Edit Bylaws', 'Edit bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.delete', 'Delete Bylaws', 'Delete bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.publish', 'Publish Bylaws', 'Mark bylaws as current', 'bylaws', NOW()),

    -- Gallery Permissions (5)
    (gen_random_uuid(), 'gallery.view', 'View Gallery', 'View event galleries', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.upload', 'Upload Photos', 'Upload event photos', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.edit', 'Edit Gallery', 'Edit photo captions', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.delete', 'Delete Photos', 'Delete event photos', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.moderate', 'Moderate Gallery', 'Approve/reject gallery submissions', 'gallery', NOW()),

    -- Settings Permissions (6)
    (gen_random_uuid(), 'settings.view', 'View Settings', 'View system settings', 'settings', NOW()),
    (gen_random_uuid(), 'settings.edit', 'Edit Settings', 'Edit system settings', 'settings', NOW()),
    (gen_random_uuid(), 'settings.email_templates', 'Manage Email Templates', 'Edit email templates', 'settings', NOW()),
    (gen_random_uuid(), 'settings.storage', 'View Storage Usage', 'View storage usage statistics', 'settings', NOW()),
    (gen_random_uuid(), 'settings.backup', 'Backup System', 'Create system backups', 'settings', NOW()),
    (gen_random_uuid(), 'settings.logs', 'View Logs', 'View system logs', 'settings', NOW()),

    -- Permissions Management (4)
    (gen_random_uuid(), 'permissions.view', 'View Permissions', 'View permission list', 'permissions', NOW()),
    (gen_random_uuid(), 'permissions.assign', 'Assign Permissions', 'Assign permissions to roles', 'permissions', NOW()),
    (gen_random_uuid(), 'permissions.manage_roles', 'Manage Roles', 'Create/edit roles', 'permissions', NOW()),
    (gen_random_uuid(), 'permissions.audit', 'View Audit Logs', 'View permission audit logs', 'permissions', NOW())

ON CONFLICT (code) DO NOTHING;

-- ============================================================================
-- Assign Permissions to Roles
-- ============================================================================

-- Guest Role: No permissions

-- Member Role: Basic viewing only
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'member',
    (SELECT id FROM roles WHERE code = 'member'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    'events.view',
    'gallery.view',
    'bylaws.view',
    'newsletters.view'
)
ON CONFLICT DO NOTHING;

-- Admin Role: Most permissions except financials and permissions management
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'admin',
    (SELECT id FROM roles WHERE code = 'admin'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    -- User Management
    'users.view', 'users.create', 'users.edit', 'users.approve', 'users.import',
    'users.export', 'users.status', 'users.reset_password', 'users.resend_verification', 'users.invite',

    -- Event Management
    'events.view', 'events.create', 'events.edit', 'events.delete', 'events.publish',
    'events.rsvps', 'events.attendance', 'events.calendar_export',

    -- Gallery
    'gallery.view', 'gallery.upload', 'gallery.edit', 'gallery.delete', 'gallery.moderate',

    -- Content
    'newsletters.view', 'newsletters.create', 'newsletters.edit', 'newsletters.delete',
    'newsletters.send', 'newsletters.subscribers',
    'bylaws.view', 'bylaws.create', 'bylaws.edit', 'bylaws.delete', 'bylaws.publish',

    -- Settings (limited)
    'settings.view', 'settings.storage', 'settings.logs'
)
ON CONFLICT DO NOTHING;

-- Finance Role: Financial permissions + basic viewing
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'finance',
    (SELECT id FROM roles WHERE code = 'finance'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    -- Subscriptions & Donations
    'subscriptions.view', 'subscriptions.create', 'subscriptions.plans', 'subscriptions.edit',
    'subscriptions.cancel', 'subscriptions.activate', 'subscriptions.export',
    'donations.view', 'donations.export',

    -- Financial Reports
    'financials.view', 'financials.create', 'financials.edit', 'financials.delete',
    'financials.export', 'financials.payments',

    -- Basic Access
    'users.view',
    'events.view',
    'bylaws.view',
    'newsletters.view'
)
ON CONFLICT DO NOTHING;

-- Superadmin Role: ALL 60 permissions
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'superadmin',
    (SELECT id FROM roles WHERE code = 'superadmin'),
    p.id,
    NOW()
FROM permissions p
ON CONFLICT DO NOTHING;

COMMIT;

\echo '✅ Complete permission set created!'
\echo ''
\echo 'Permission counts:'
\echo '  Total permissions: 60'
\echo '    - users: 11'
\echo '    - events: 8'
\echo '    - subscriptions: 7'
\echo '    - donations: 2'
\echo '    - financials: 6'
\echo '    - newsletters: 6'
\echo '    - bylaws: 5'
\echo '    - gallery: 5'
\echo '    - settings: 6'
\echo '    - permissions: 4'
\echo ''
\echo 'Role assignments:'
\echo '  - Guest: 0'
\echo '  - Member: 4 (view only)'
\echo '  - Admin: ~40'
\echo '  - Finance: ~20'
\echo '  - Superadmin: 60 (all)'
\echo ''
\echo 'Next: Restart backend with: pm2 restart membership-backend'
models.py (38 lines changed)
@@ -137,6 +137,10 @@ class User(Base):
    wordpress_user_id = Column(BigInteger, nullable=True, comment="Original WordPress user ID")
    wordpress_registered_date = Column(DateTime(timezone=True), nullable=True, comment="Original WordPress registration date")

    # Role Change Audit Trail
    role_changed_at = Column(DateTime(timezone=True), nullable=True, comment="Timestamp when role was last changed")
    role_changed_by = Column(UUID(as_uuid=True), ForeignKey('users.id', ondelete='SET NULL'), nullable=True, comment="Admin who changed the role")

    created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))

@@ -145,6 +149,7 @@ class User(Base):
    events_created = relationship("Event", back_populates="creator")
    rsvps = relationship("EventRSVP", back_populates="user")
    subscriptions = relationship("Subscription", back_populates="user", foreign_keys="Subscription.user_id")
    role_changer = relationship("User", foreign_keys=[role_changed_by], remote_side="User.id", post_update=True)


class Event(Base):
    __tablename__ = "events"

@@ -509,3 +514,36 @@ class ImportRollbackAudit(Base):
    # Relationships
    import_job = relationship("ImportJob")
    admin_user = relationship("User", foreign_keys=[rolled_back_by])


# ============================================================
# System Settings Models
# ============================================================

class SettingType(enum.Enum):
    plaintext = "plaintext"
    encrypted = "encrypted"
    json = "json"


class SystemSettings(Base):
    """System-wide configuration settings stored in database"""
    __tablename__ = "system_settings"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    setting_key = Column(String(100), unique=True, nullable=False, index=True)
    setting_value = Column(Text, nullable=True)
    setting_type = Column(SQLEnum(SettingType), default=SettingType.plaintext, nullable=False)
    description = Column(Text, nullable=True)
    updated_by = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True)
    created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False)
    updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)
    is_sensitive = Column(Boolean, default=False, nullable=False)

    # Relationships
    updater = relationship("User", foreign_keys=[updated_by])

    # Index on updated_at for audit queries
    __table_args__ = (
        Index('idx_system_settings_updated_at', 'updated_at'),
    )
|
||||
|
||||
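The new SystemSettings model can be exercised on its own. A self-contained sketch for illustration only: it swaps the Postgres UUID primary key for a String so the demo runs on in-memory SQLite, and trims the columns that need the full app context:

import enum
import uuid
from datetime import datetime, timezone
from sqlalchemy import Column, String, Text, Enum as SQLEnum, DateTime, Boolean, create_engine
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class SettingType(enum.Enum):
    plaintext = "plaintext"
    encrypted = "encrypted"
    json = "json"

class SystemSettings(Base):
    __tablename__ = "system_settings"
    # String PK here only so SQLite can run the demo; the real model uses UUID
    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    setting_key = Column(String(100), unique=True, nullable=False, index=True)
    setting_value = Column(Text, nullable=True)
    setting_type = Column(SQLEnum(SettingType), default=SettingType.plaintext, nullable=False)
    is_sensitive = Column(Boolean, default=False, nullable=False)
    updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(SystemSettings(setting_key="stripe_secret_key",
                               setting_value="<encrypted blob>",
                               setting_type=SettingType.encrypted,
                               is_sensitive=True))
    session.commit()
    row = session.query(SystemSettings).filter_by(setting_key="stripe_secret_key").one()
    print(row.setting_key, row.setting_type.value, row.is_sensitive)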
@@ -11,11 +11,9 @@ from datetime import datetime, timezone, timedelta
 # Load environment variables
 load_dotenv()
 
-# Initialize Stripe with secret key
-stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
-
-# Stripe webhook secret for signature verification
-STRIPE_WEBHOOK_SECRET = os.getenv("STRIPE_WEBHOOK_SECRET")
+# NOTE: Stripe credentials are now database-driven
+# These .env fallbacks are kept for backward compatibility only
+# The actual credentials are loaded dynamically from system_settings table
 
 def create_checkout_session(
     user_id: str,
@@ -23,11 +21,15 @@ def create_checkout_session(
     plan_id: str,
     stripe_price_id: str,
     success_url: str,
-    cancel_url: str
+    cancel_url: str,
+    db = None
 ):
     """
     Create a Stripe Checkout session for subscription payment.
 
+    Args:
+        db: Database session (optional, for reading Stripe credentials from database)
+
     Args:
         user_id: User's UUID
         user_email: User's email address
@@ -39,6 +41,28 @@ def create_checkout_session(
     Returns:
         dict: Checkout session object with session ID and URL
     """
+    # Load Stripe API key from database if available
+    if db:
+        try:
+            # Import here to avoid circular dependency
+            from models import SystemSettings, SettingType
+            from encryption_service import get_encryption_service
+
+            setting = db.query(SystemSettings).filter(
+                SystemSettings.setting_key == 'stripe_secret_key'
+            ).first()
+
+            if setting and setting.setting_value:
+                encryption_service = get_encryption_service()
+                stripe.api_key = encryption_service.decrypt(setting.setting_value)
+        except Exception as e:
+            # Fallback to .env if database read fails
+            print(f"Failed to read Stripe key from database: {e}")
+            stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
+    else:
+        # Fallback to .env if no db session
+        stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
+
     try:
         # Create Checkout Session
         checkout_session = stripe.checkout.Session.create(
@@ -74,13 +98,14 @@ def create_checkout_session(
         raise Exception(f"Stripe error: {str(e)}")
 
 
-def verify_webhook_signature(payload: bytes, sig_header: str) -> dict:
+def verify_webhook_signature(payload: bytes, sig_header: str, db=None) -> dict:
     """
     Verify Stripe webhook signature and construct event.
 
     Args:
         payload: Raw webhook payload bytes
         sig_header: Stripe signature header
+        db: Database session (optional, for reading webhook secret from database)
 
     Returns:
         dict: Verified webhook event
@@ -88,9 +113,32 @@ def verify_webhook_signature(payload: bytes, sig_header: str) -> dict:
     Raises:
         ValueError: If signature verification fails
     """
+    # Load webhook secret from database if available
+    webhook_secret = None
+    if db:
+        try:
+            from models import SystemSettings
+            from encryption_service import get_encryption_service
+
+            setting = db.query(SystemSettings).filter(
+                SystemSettings.setting_key == 'stripe_webhook_secret'
+            ).first()
+
+            if setting and setting.setting_value:
+                encryption_service = get_encryption_service()
+                webhook_secret = encryption_service.decrypt(setting.setting_value)
+        except Exception as e:
+            print(f"Failed to read webhook secret from database: {e}")
+            webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
+    else:
+        webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
+
+    if not webhook_secret:
+        raise ValueError("STRIPE_WEBHOOK_SECRET not configured")
+
     try:
         event = stripe.Webhook.construct_event(
-            payload, sig_header, STRIPE_WEBHOOK_SECRET
+            payload, sig_header, webhook_secret
         )
         return event
     except ValueError as e:
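The encryption_service module referenced here is not part of this diff, so its interface has to be inferred. A plausible minimal sketch, assuming Fernet from the cryptography package and a key supplied through an environment variable; both the mechanism and the variable name are assumptions, not something this commit confirms:

import base64
import hashlib
import os
from cryptography.fernet import Fernet  # assumed dependency

class EncryptionService:
    def __init__(self, key_material: str):
        # Derive a 32-byte urlsafe-base64 Fernet key from arbitrary key material
        digest = hashlib.sha256(key_material.encode()).digest()
        self._fernet = Fernet(base64.urlsafe_b64encode(digest))

    def encrypt(self, plaintext: str) -> str:
        return self._fernet.encrypt(plaintext.encode()).decode()

    def decrypt(self, ciphertext: str) -> str:
        return self._fernet.decrypt(ciphertext.encode()).decode()

_service = None

def get_encryption_service() -> EncryptionService:
    # Singleton keyed off an env var; the variable name is an assumption
    global _service
    if _service is None:
        _service = EncryptionService(os.environ["SETTINGS_ENCRYPTION_KEY"])
    return _service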
@@ -31,7 +31,7 @@ motor==3.3.1
 msal==1.27.0
 mypy==1.18.2
 mypy_extensions==1.1.0
-numpy==2.3.5
+numpy==2.2.6
 oauthlib==3.3.1
 packaging==25.0
 pandas==2.3.3
745 server.py
@@ -60,11 +60,29 @@ async def lifespan(app: FastAPI):
     # Shutdown
     logger.info("Application shutdown")
 
+# Environment detection
+ENVIRONMENT = os.environ.get('ENVIRONMENT', 'development')
+IS_PRODUCTION = ENVIRONMENT == 'production'
+
+# Security: Disable API documentation in production
+if IS_PRODUCTION:
+    print("🔒 Production mode: API documentation disabled")
+    app_config = {
+        "lifespan": lifespan,
+        "root_path": "/membership",
+        "docs_url": None,     # Disable /docs
+        "redoc_url": None,    # Disable /redoc
+        "openapi_url": None   # Disable /openapi.json
+    }
+else:
+    print("🔓 Development mode: API documentation enabled at /docs and /redoc")
+    app_config = {
+        "lifespan": lifespan,
+        "root_path": "/membership"
+    }
+
 # Create the main app
-app = FastAPI(
-    lifespan=lifespan,
-    root_path="/membership"  # Configure for serving under /membership path
-)
+app = FastAPI(**app_config)
 
 # Create a router with the /api prefix
 api_router = APIRouter(prefix="/api")
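The effect of the conditional app_config is easy to check without deploying. A rough sketch using a toy app in place of server.py, assuming fastapi and its TestClient (which needs httpx) are installed:

from fastapi import FastAPI
from fastapi.testclient import TestClient

def build_app(environment: str) -> FastAPI:
    # Mirrors the pattern above: docs endpoints disabled in production
    if environment == 'production':
        return FastAPI(docs_url=None, redoc_url=None, openapi_url=None)
    return FastAPI()

client = TestClient(build_app('production'))
assert client.get("/docs").status_code == 404   # docs hidden
client = TestClient(build_app('development'))
assert client.get("/docs").status_code == 200   # docs served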
@@ -364,6 +382,9 @@ class AttendanceUpdate(BaseModel):
     user_id: str
     attended: bool
 
+class BatchAttendanceUpdate(BaseModel):
+    updates: list[AttendanceUpdate]
+
 class UpdateUserStatusRequest(BaseModel):
     status: str
 
@@ -493,6 +514,10 @@ class AcceptInvitationRequest(BaseModel):
     zipcode: Optional[str] = None
     date_of_birth: Optional[datetime] = None
 
+class ChangeRoleRequest(BaseModel):
+    role: str
+    role_id: Optional[str] = None  # For custom roles
+
 # Auth Routes
 @api_router.post("/auth/register")
 async def register(request: RegisterRequest, db: Session = Depends(get_db)):
@@ -787,6 +812,53 @@ async def get_config():
         "max_file_size_mb": int(max_file_size_mb)
     }
 
+@api_router.get("/diagnostics/cors")
+async def cors_diagnostics(request: Request):
+    """
+    CORS Diagnostics Endpoint
+    Shows current CORS configuration and request details for debugging
+
+    Use this to verify:
+    1. What origins are allowed
+    2. What origin is making the request
+    3. Whether CORS is properly configured
+    """
+    cors_origins_env = os.environ.get('CORS_ORIGINS', '')
+
+    if cors_origins_env:
+        configured_origins = [origin.strip() for origin in cors_origins_env.split(',')]
+        cors_status = "✅ CONFIGURED"
+    else:
+        configured_origins = [
+            "http://localhost:3000",
+            "http://localhost:8000",
+            "http://127.0.0.1:3000",
+            "http://127.0.0.1:8000"
+        ]
+        cors_status = "⚠️ NOT CONFIGURED (using defaults)"
+
+    request_origin = request.headers.get('origin', 'None')
+    origin_allowed = request_origin in configured_origins
+
+    return {
+        "cors_status": cors_status,
+        "environment": ENVIRONMENT,
+        "cors_origins_env_variable": cors_origins_env or "(not set)",
+        "allowed_origins": configured_origins,
+        "request_origin": request_origin,
+        "origin_allowed": origin_allowed,
+        "diagnosis": {
+            "cors_configured": bool(cors_origins_env),
+            "origin_matches": origin_allowed,
+            "issue": None if origin_allowed else f"Origin '{request_origin}' is not in allowed origins list"
+        },
+        "fix_instructions": None if origin_allowed else (
+            f"Add to backend .env file:\n"
+            f"CORS_ORIGINS={request_origin}"
+            f"{(',' + ','.join(configured_origins)) if cors_origins_env else ''}"
+        )
+    }
+
 # User Profile Routes
 @api_router.get("/users/profile", response_model=UserResponse)
 async def get_profile(current_user: User = Depends(get_current_user)):
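To exercise the new diagnostics route, send a request with a browser-style Origin header. A sketch assuming the requests package and a locally running backend; the URL is a placeholder (behind the production reverse proxy the /membership prefix applies):

import requests  # assumed dependency

resp = requests.get(
    "http://localhost:8000/api/diagnostics/cors",   # placeholder URL
    headers={"Origin": "http://localhost:3000"},    # simulate a browser origin
)
data = resp.json()
print(data["cors_status"], data["origin_allowed"])
if data["fix_instructions"]:
    print(data["fix_instructions"])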
@@ -1499,7 +1571,14 @@ async def get_events(
             EventRSVP.rsvp_status == RSVPStatus.yes
         ).count()
 
-        # No user_rsvp_status in public endpoint
+        # Get current user's RSVP status for this event
+        user_rsvp = db.query(EventRSVP).filter(
+            EventRSVP.event_id == event.id,
+            EventRSVP.user_id == current_user.id
+        ).first()
+
+        user_rsvp_status = user_rsvp.rsvp_status.value if user_rsvp else None
+
         result.append(EventResponse(
             id=str(event.id),
             title=event.title,
@@ -1512,7 +1591,7 @@ async def get_events(
             created_by=str(event.created_by),
             created_at=event.created_at,
             rsvp_count=rsvp_count,
-            user_rsvp_status=None
+            user_rsvp_status=user_rsvp_status
         ))
 
     return result
@@ -1532,8 +1611,13 @@ async def get_event(
         EventRSVP.rsvp_status == RSVPStatus.yes
     ).count()
 
-    # No user_rsvp_status in public endpoint
-    user_rsvp = None
+    # Get current user's RSVP status for this event
+    user_rsvp = db.query(EventRSVP).filter(
+        EventRSVP.event_id == event_id,
+        EventRSVP.user_id == current_user.id
+    ).first()
+
+    user_rsvp_status = user_rsvp.rsvp_status.value if user_rsvp else None
 
     return EventResponse(
         id=str(event.id),
@@ -1547,7 +1631,7 @@ async def get_event(
         created_by=str(event.created_by),
         created_at=event.created_at,
         rsvp_count=rsvp_count,
-        user_rsvp_status=user_rsvp
+        user_rsvp_status=user_rsvp_status
     )
 
 @api_router.post("/events/{event_id}/rsvp")
@@ -1618,7 +1702,9 @@ async def get_my_event_activity(
         }
 
         # Separate upcoming vs past events
-        if event.end_at > now:
+        # Ensure timezone-aware comparison
+        event_end_at = event.end_at.replace(tzinfo=timezone.utc) if event.end_at.tzinfo is None else event.end_at
+        if event_end_at > now:
             upcoming_events.append(event_data)
         else:
             past_events.append(event_data)
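This change (and the matching invitation-expiry fixes later in this diff) works around Python's refusal to compare naive and aware datetimes. A self-contained illustration:

from datetime import datetime, timezone

now = datetime.now(timezone.utc)         # timezone-aware
from_db = datetime(2025, 1, 1, 12, 0)    # naive, as a DateTime column without timezone returns it

try:
    from_db > now
except TypeError as e:
    print(e)  # can't compare offset-naive and offset-aware datetimes

# Coerce the naive value to UTC before comparing, as the diff does
aware = from_db.replace(tzinfo=timezone.utc) if from_db.tzinfo is None else from_db
print(aware > now)  # now a valid comparison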
@@ -2445,6 +2531,102 @@ async def admin_reset_user_password(
 
     return {"message": f"Password reset for {user.email}. Temporary password emailed."}
 
+@api_router.put("/admin/users/{user_id}/role")
+async def change_user_role(
+    user_id: str,
+    request: ChangeRoleRequest,
+    current_user: User = Depends(require_permission("users.edit")),
+    db: Session = Depends(get_db)
+):
+    """
+    Change an existing user's role with privilege escalation prevention.
+
+    Requires: users.edit permission
+
+    Rules:
+    - Superadmin: Can assign any role (including superadmin)
+    - Admin: Can assign admin, finance, member, guest, and non-elevated custom roles
+    - Admin CANNOT assign: superadmin or custom roles with elevated permissions
+    - Users CANNOT change their own role
+    """
+
+    # 1. Fetch target user
+    target_user = db.query(User).filter(User.id == user_id).first()
+    if not target_user:
+        raise HTTPException(status_code=404, detail="User not found")
+
+    # 2. Prevent self-role-change
+    if str(target_user.id) == str(current_user.id):
+        raise HTTPException(
+            status_code=403,
+            detail="You cannot change your own role"
+        )
+
+    # 3. Validate new role
+    if request.role not in ['guest', 'member', 'admin', 'finance', 'superadmin']:
+        raise HTTPException(status_code=400, detail="Invalid role")
+
+    # 4. Privilege escalation check
+    if current_user.role != 'superadmin':
+        # Non-superadmin cannot assign superadmin role
+        if request.role == 'superadmin':
+            raise HTTPException(
+                status_code=403,
+                detail="Only superadmin can assign superadmin role"
+            )
+
+        # Check custom role elevation
+        if request.role_id:
+            custom_role = db.query(Role).filter(Role.id == request.role_id).first()
+            if not custom_role:
+                raise HTTPException(status_code=404, detail="Custom role not found")
+
+            # Check if custom role has elevated permissions
+            elevated_permissions = ['users.delete', 'roles.create', 'roles.edit',
+                                    'roles.delete', 'permissions.edit']
+            role_perms = db.query(Permission.name).join(RolePermission).filter(
+                RolePermission.role_id == custom_role.id,
+                Permission.name.in_(elevated_permissions)
+            ).all()
+
+            if role_perms:
+                raise HTTPException(
+                    status_code=403,
+                    detail=f"Cannot assign role with elevated permissions: {custom_role.name}"
+                )
+
+    # 5. Update role with audit trail
+    old_role = target_user.role
+    old_role_id = target_user.role_id
+
+    target_user.role = request.role
+    target_user.role_id = request.role_id if request.role_id else None
+    target_user.role_changed_at = datetime.now(timezone.utc)
+    target_user.role_changed_by = current_user.id
+    target_user.updated_at = datetime.now(timezone.utc)
+
+    db.commit()
+    db.refresh(target_user)
+
+    # Log admin action
+    logger.info(
+        f"Admin {current_user.email} changed role for user {target_user.email} "
+        f"from {old_role} to {request.role}"
+    )
+
+    return {
+        "message": f"Role changed from {old_role} to {request.role}",
+        "user": {
+            "id": str(target_user.id),
+            "email": target_user.email,
+            "name": f"{target_user.first_name} {target_user.last_name}",
+            "old_role": old_role,
+            "new_role": target_user.role,
+            "changed_by": f"{current_user.first_name} {current_user.last_name}",
+            "changed_at": target_user.role_changed_at.isoformat()
+        }
+    }
+
 @api_router.post("/admin/users/{user_id}/resend-verification")
 async def admin_resend_verification(
     user_id: str,
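A sketch of driving the new endpoint, assuming the requests package; URL, user id, and token are placeholders:

import requests  # assumed dependency

resp = requests.put(
    "http://localhost:8000/api/admin/users/<user-uuid>/role",  # placeholder
    json={"role": "finance"},                    # ChangeRoleRequest body
    headers={"Authorization": "Bearer <admin-jwt>"},
)
print(resp.status_code, resp.json())
# Expect 403 when callers target their own account, or when a non-superadmin
# tries to assign superadmin or an elevated custom role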
@@ -2814,8 +2996,9 @@ async def verify_invitation_token(
     if not invitation:
         raise HTTPException(status_code=404, detail="Invalid or expired invitation token")
 
-    # Check expiry
-    if invitation.expires_at < datetime.now(timezone.utc):
+    # Check expiry (handle timezone-naive datetime from DB)
+    expires_at_aware = invitation.expires_at.replace(tzinfo=timezone.utc) if invitation.expires_at.tzinfo is None else invitation.expires_at
+    if expires_at_aware < datetime.now(timezone.utc):
         invitation.status = InvitationStatus.expired
         db.commit()
         raise HTTPException(status_code=400, detail="Invitation has expired")
@@ -2847,8 +3030,9 @@ async def accept_invitation(
     if not invitation:
         raise HTTPException(status_code=404, detail="Invalid or expired invitation token")
 
-    # Check expiry
-    if invitation.expires_at < datetime.now(timezone.utc):
+    # Check expiry (handle timezone-naive datetime from DB)
+    expires_at_aware = invitation.expires_at.replace(tzinfo=timezone.utc) if invitation.expires_at.tzinfo is None else invitation.expires_at
+    if expires_at_aware < datetime.now(timezone.utc):
         invitation.status = InvitationStatus.expired
         db.commit()
         raise HTTPException(status_code=400, detail="Invitation has expired")
@@ -3794,6 +3978,37 @@ async def update_event(
 
     return {"message": "Event updated successfully"}
 
+@api_router.get("/admin/events/{event_id}", response_model=EventResponse)
+async def get_admin_event(
+    event_id: str,
+    current_user: User = Depends(require_permission("events.view")),
+    db: Session = Depends(get_db)
+):
+    """Get single event details (admin) - allows viewing unpublished events"""
+    event = db.query(Event).filter(Event.id == event_id).first()
+    if not event:
+        raise HTTPException(status_code=404, detail="Event not found")
+
+    rsvp_count = db.query(EventRSVP).filter(
+        EventRSVP.event_id == event.id,
+        EventRSVP.rsvp_status == RSVPStatus.yes
+    ).count()
+
+    return EventResponse(
+        id=str(event.id),
+        title=event.title,
+        description=event.description,
+        start_at=event.start_at,
+        end_at=event.end_at,
+        location=event.location,
+        capacity=event.capacity,
+        published=event.published,
+        created_by=str(event.created_by),
+        created_at=event.created_at,
+        rsvp_count=rsvp_count,
+        user_rsvp_status=None
+    )
+
 @api_router.get("/admin/events/{event_id}/rsvps")
 async def get_event_rsvps(
     event_id: str,
@@ -3824,46 +4039,53 @@ async def get_event_rsvps(
 @api_router.put("/admin/events/{event_id}/attendance")
 async def mark_attendance(
     event_id: str,
-    request: AttendanceUpdate,
+    request: BatchAttendanceUpdate,
     current_user: User = Depends(require_permission("events.attendance")),
     db: Session = Depends(get_db)
 ):
     """Mark attendance for one or more users (supports batch updates)"""
     event = db.query(Event).filter(Event.id == event_id).first()
     if not event:
         raise HTTPException(status_code=404, detail="Event not found")
 
-    rsvp = db.query(EventRSVP).filter(
-        EventRSVP.event_id == event_id,
-        EventRSVP.user_id == request.user_id
-    ).first()
+    updated_count = 0
 
-    # Auto-create RSVP if it doesn't exist (for retroactive attendance marking)
-    if not rsvp:
-        rsvp = EventRSVP(
-            event_id=event_id,
-            user_id=request.user_id,
-            rsvp_status=RSVPStatus.yes,  # Default to 'yes' for attended events
-            attended=False,
-            created_at=datetime.now(timezone.utc),
-            updated_at=datetime.now(timezone.utc)
-        )
-        db.add(rsvp)
-        db.flush()  # Get the ID without committing
+    # Process each update in the batch
+    for update in request.updates:
+        rsvp = db.query(EventRSVP).filter(
+            EventRSVP.event_id == event_id,
+            EventRSVP.user_id == update.user_id
+        ).first()
 
-    rsvp.attended = request.attended
-    rsvp.attended_at = datetime.now(timezone.utc) if request.attended else None
-    rsvp.updated_at = datetime.now(timezone.utc)
+        # Auto-create RSVP if it doesn't exist (for retroactive attendance marking)
+        if not rsvp:
+            rsvp = EventRSVP(
+                event_id=event_id,
+                user_id=update.user_id,
+                rsvp_status=RSVPStatus.yes,  # Default to 'yes' for attended events
+                attended=False,
+                created_at=datetime.now(timezone.utc),
+                updated_at=datetime.now(timezone.utc)
+            )
+            db.add(rsvp)
+            db.flush()  # Get the ID without committing
 
-    # If user attended and they were pending validation, update their status
-    if request.attended:
-        user = db.query(User).filter(User.id == request.user_id).first()
-        if user and user.status == UserStatus.pending_validation:
-            user.status = UserStatus.pre_validated
-            user.updated_at = datetime.now(timezone.utc)
+        rsvp.attended = update.attended
+        rsvp.attended_at = datetime.now(timezone.utc) if update.attended else None
+        rsvp.updated_at = datetime.now(timezone.utc)
+
+        # If user attended and they were pending validation, update their status
+        if update.attended:
+            user = db.query(User).filter(User.id == update.user_id).first()
+            if user and user.status == UserStatus.pending_validation:
+                user.status = UserStatus.pre_validated
+                user.updated_at = datetime.now(timezone.utc)
+
+        updated_count += 1
 
     db.commit()
 
-    return {"message": "Attendance marked successfully"}
+    return {"message": f"Attendance marked successfully for {updated_count} {'person' if updated_count == 1 else 'people'}"}
 
 @api_router.get("/admin/events")
 async def get_admin_events(
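The request body changes shape with this rework: a BatchAttendanceUpdate wrapping a list of AttendanceUpdate items replaces the single-user payload. A sketch with placeholder IDs, assuming requests:

import requests  # assumed dependency

payload = {
    "updates": [  # list[AttendanceUpdate]
        {"user_id": "<user-uuid-1>", "attended": True},
        {"user_id": "<user-uuid-2>", "attended": False},
    ]
}
resp = requests.put(
    "http://localhost:8000/api/admin/events/<event-uuid>/attendance",  # placeholder
    json=payload,
    headers={"Authorization": "Bearer <admin-jwt>"},
)
print(resp.json())  # e.g. {"message": "Attendance marked successfully for 2 people"}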
@@ -5185,6 +5407,101 @@ async def get_all_roles(
         for role, count in roles_with_counts
     ]
 
+@api_router.get("/admin/roles/assignable", response_model=List[RoleResponse])
+async def get_assignable_roles(
+    current_user: User = Depends(require_permission("users.create")),
+    db: Session = Depends(get_db)
+):
+    """
+    Get roles that the current user can assign when inviting staff
+
+    - Superadmin: Can assign all roles
+    - Admin: Can assign admin, finance, and non-elevated custom roles
+    - Returns roles filtered by user's permission level
+    """
+    from sqlalchemy import func
+
+    # Query all roles with permission counts
+    roles_query = db.query(
+        Role,
+        func.count(RolePermission.id).label('permission_count')
+    ).outerjoin(RolePermission, Role.id == RolePermission.role_id)\
+     .group_by(Role.id)\
+     .order_by(Role.is_system_role.desc(), Role.name)
+
+    all_roles = roles_query.all()
+
+    # Superadmin can assign any role
+    if current_user.role == UserRole.superadmin:
+        return [
+            {
+                "id": str(role.id),
+                "code": role.code,
+                "name": role.name,
+                "description": role.description,
+                "is_system_role": role.is_system_role,
+                "created_at": role.created_at,
+                "updated_at": role.updated_at,
+                "permission_count": count
+            }
+            for role, count in all_roles
+        ]
+
+    # Admin users can assign: admin, finance, and non-elevated custom roles
+    # Get admin role's permissions to check for elevation
+    admin_role = db.query(Role).filter(Role.code == "admin").first()
+    admin_permission_codes = set()
+    if admin_role:
+        admin_permissions = db.query(RolePermission).filter(
+            RolePermission.role_id == admin_role.id
+        ).all()
+        admin_permission_codes = {rp.permission_id for rp in admin_permissions}
+
+    assignable_roles = []
+    for role, count in all_roles:
+        # Always exclude superadmin role
+        if role.code == "superadmin":
+            continue
+
+        # Include system roles: admin and finance
+        if role.is_system_role and role.code in ["admin", "finance"]:
+            assignable_roles.append({
+                "id": str(role.id),
+                "code": role.code,
+                "name": role.name,
+                "description": role.description,
+                "is_system_role": role.is_system_role,
+                "created_at": role.created_at,
+                "updated_at": role.updated_at,
+                "permission_count": count
+            })
+            continue
+
+        # For custom roles, check if they're elevated
+        if not role.is_system_role:
+            role_permissions = db.query(RolePermission).filter(
+                RolePermission.role_id == role.id
+            ).all()
+            role_permission_ids = {rp.permission_id for rp in role_permissions}
+
+            # Check if custom role has permissions admin doesn't have (elevated)
+            has_elevated_permissions = bool(role_permission_ids - admin_permission_codes)
+
+            # Only include non-elevated custom roles
+            if not has_elevated_permissions:
+                assignable_roles.append({
+                    "id": str(role.id),
+                    "code": role.code,
+                    "name": role.name,
+                    "description": role.description,
+                    "is_system_role": role.is_system_role,
+                    "created_at": role.created_at,
+                    "updated_at": role.updated_at,
+                    "permission_count": count
+                })
+
+    return assignable_roles
+
 @api_router.post("/admin/roles", response_model=RoleResponse)
 async def create_role(
     request: CreateRoleRequest,
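The elevation test reduces to a set difference: a custom role counts as elevated if it grants any permission id that the admin role lacks. In isolation:

# Toy permission-id sets mirroring the check above
admin_permission_ids = {"p1", "p2", "p3"}

custom_ok = {"p1", "p3"}          # subset of admin's permissions: assignable
custom_elevated = {"p2", "p4"}    # p4 is not held by admin: elevated

print(bool(custom_ok - admin_permission_ids))        # False -> not elevated
print(bool(custom_elevated - admin_permission_ids))  # True  -> excluded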
@@ -5980,8 +6297,8 @@ async def stripe_webhook(request: Request, db: Session = Depends(get_db)):
         raise HTTPException(status_code=400, detail="Missing stripe-signature header")
 
     try:
-        # Verify webhook signature
-        event = verify_webhook_signature(payload, sig_header)
+        # Verify webhook signature (pass db for reading webhook secret from database)
+        event = verify_webhook_signature(payload, sig_header, db)
     except ValueError as e:
         logger.error(f"Webhook signature verification failed: {str(e)}")
         raise HTTPException(status_code=400, detail=str(e))
@@ -6081,13 +6398,351 @@ async def stripe_webhook(request: Request, db: Session = Depends(get_db)):
 
     return {"status": "success"}
 
+# ============================================================================
+# ADMIN SETTINGS ENDPOINTS
+# ============================================================================
+
+# Helper functions for system settings
+def get_setting(db: Session, key: str, decrypt: bool = False) -> str | None:
+    """
+    Get a system setting value from database.
+
+    Args:
+        db: Database session
+        key: Setting key to retrieve
+        decrypt: If True and setting_type is 'encrypted', decrypt the value
+
+    Returns:
+        Setting value or None if not found
+    """
+    from models import SystemSettings, SettingType
+    from encryption_service import get_encryption_service
+
+    setting = db.query(SystemSettings).filter(SystemSettings.setting_key == key).first()
+    if not setting:
+        return None
+
+    value = setting.setting_value
+    if decrypt and setting.setting_type == SettingType.encrypted and value:
+        try:
+            encryption_service = get_encryption_service()
+            value = encryption_service.decrypt(value)
+        except Exception as e:
+            print(f"Failed to decrypt setting {key}: {e}")
+            return None
+
+    return value
+
+
+def set_setting(
+    db: Session,
+    key: str,
+    value: str,
+    user_id: str,
+    setting_type: str = "plaintext",
+    description: str = None,
+    is_sensitive: bool = False,
+    encrypt: bool = False
+) -> None:
+    """
+    Set a system setting value in database.
+
+    Args:
+        db: Database session
+        key: Setting key
+        value: Setting value
+        user_id: ID of user making the change
+        setting_type: Type of setting (plaintext, encrypted, json)
+        description: Human-readable description
+        is_sensitive: Whether this is sensitive data
+        encrypt: If True, encrypt the value before storing
+    """
+    from models import SystemSettings, SettingType
+    from encryption_service import get_encryption_service
+
+    # Encrypt value if requested
+    if encrypt and value:
+        encryption_service = get_encryption_service()
+        value = encryption_service.encrypt(value)
+        setting_type = "encrypted"
+
+    # Find or create setting
+    setting = db.query(SystemSettings).filter(SystemSettings.setting_key == key).first()
+
+    if setting:
+        # Update existing
+        setting.setting_value = value
+        setting.setting_type = SettingType[setting_type]
+        setting.updated_by = user_id
+        setting.updated_at = datetime.now(timezone.utc)
+        if description:
+            setting.description = description
+        setting.is_sensitive = is_sensitive
+    else:
+        # Create new
+        setting = SystemSettings(
+            setting_key=key,
+            setting_value=value,
+            setting_type=SettingType[setting_type],
+            description=description,
+            updated_by=user_id,
+            is_sensitive=is_sensitive
+        )
+        db.add(setting)
+
+    db.commit()
+
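Together the two helpers form a small read/write API over system_settings. A usage sketch, assuming an open SQLAlchemy session named db and a placeholder admin UUID:

# Store an encrypted, sensitive credential; set_setting switches
# setting_type to "encrypted" automatically when encrypt=True
set_setting(
    db=db,
    key="stripe_secret_key",
    value="sk_test_xxx",          # placeholder value
    user_id="<admin-uuid>",       # placeholder UUID
    is_sensitive=True,
    encrypt=True,
)

# Read it back decrypted; returns None if missing or if decryption fails
secret = get_setting(db, "stripe_secret_key", decrypt=True)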
@api_router.get("/admin/settings/stripe/status")
|
||||
async def get_stripe_status(
|
||||
current_user: User = Depends(get_current_superadmin),
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Get Stripe integration status (superadmin only).
|
||||
|
||||
Returns:
|
||||
- configured: Whether credentials exist in database
|
||||
- secret_key_prefix: First 10 chars of secret key (for verification)
|
||||
- webhook_configured: Whether webhook secret exists
|
||||
- environment: test or live (based on key prefix)
|
||||
- webhook_url: Full webhook URL for Stripe configuration
|
||||
"""
|
||||
import os
|
||||
|
||||
# Read from database
|
||||
secret_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
||||
webhook_secret = get_setting(db, 'stripe_webhook_secret', decrypt=True)
|
||||
|
||||
configured = bool(secret_key)
|
||||
environment = 'unknown'
|
||||
|
||||
if secret_key:
|
||||
if secret_key.startswith('sk_test_'):
|
||||
environment = 'test'
|
||||
elif secret_key.startswith('sk_live_'):
|
||||
environment = 'live'
|
||||
|
||||
# Get backend URL from environment for webhook URL
|
||||
# Try multiple environment variable patterns for flexibility
|
||||
backend_url = (
|
||||
os.environ.get('BACKEND_URL') or
|
||||
os.environ.get('API_URL') or
|
||||
f"http://{os.environ.get('HOST', 'localhost')}:{os.environ.get('PORT', '8000')}"
|
||||
)
|
||||
webhook_url = f"{backend_url}/api/webhooks/stripe"
|
||||
|
||||
return {
|
||||
"configured": configured,
|
||||
"secret_key_prefix": secret_key[:10] if secret_key else None,
|
||||
"secret_key_set": bool(secret_key),
|
||||
"webhook_secret_set": bool(webhook_secret),
|
||||
"environment": environment,
|
||||
"webhook_url": webhook_url,
|
||||
"instructions": {
|
||||
"location": "Database (system_settings table)",
|
||||
"required_settings": [
|
||||
"stripe_secret_key (sk_test_... or sk_live_...)",
|
||||
"stripe_webhook_secret (whsec_...)"
|
||||
],
|
||||
"restart_required": "No - changes take effect immediately"
|
||||
}
|
||||
}
|
||||
|
||||
@api_router.post("/admin/settings/stripe/test-connection")
|
||||
async def test_stripe_connection(
|
||||
current_user: User = Depends(get_current_superadmin),
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Test Stripe API connection (superadmin only).
|
||||
|
||||
Performs a simple API call to verify credentials work.
|
||||
"""
|
||||
import stripe
|
||||
|
||||
# Read from database
|
||||
secret_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
||||
|
||||
if not secret_key:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="STRIPE_SECRET_KEY not configured in database. Please configure Stripe settings first."
|
||||
)
|
||||
|
||||
try:
|
||||
stripe.api_key = secret_key
|
||||
|
||||
# Make a simple API call to test connection
|
||||
balance = stripe.Balance.retrieve()
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Stripe connection successful",
|
||||
"environment": "test" if secret_key.startswith('sk_test_') else "live",
|
||||
"balance": {
|
||||
"available": balance.available,
|
||||
"pending": balance.pending
|
||||
}
|
||||
}
|
||||
except stripe.error.AuthenticationError as e:
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
detail=f"Stripe authentication failed: {str(e)}"
|
||||
)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Stripe connection test failed: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
class UpdateStripeSettingsRequest(BaseModel):
|
||||
"""Request model for updating Stripe settings"""
|
||||
secret_key: str = Field(..., min_length=1, description="Stripe secret key (sk_test_... or sk_live_...)")
|
||||
webhook_secret: str = Field(..., min_length=1, description="Stripe webhook secret (whsec_...)")
|
||||
|
||||
|
||||
@api_router.put("/admin/settings/stripe")
|
||||
async def update_stripe_settings(
|
||||
request: UpdateStripeSettingsRequest,
|
||||
current_user: User = Depends(get_current_superadmin),
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Update Stripe integration settings (superadmin only).
|
||||
|
||||
Stores Stripe credentials encrypted in the database.
|
||||
Changes take effect immediately without server restart.
|
||||
"""
|
||||
# Validate secret key format
|
||||
if not (request.secret_key.startswith('sk_test_') or request.secret_key.startswith('sk_live_')):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Invalid Stripe secret key format. Must start with 'sk_test_' or 'sk_live_'"
|
||||
)
|
||||
|
||||
# Validate webhook secret format
|
||||
if not request.webhook_secret.startswith('whsec_'):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Invalid Stripe webhook secret format. Must start with 'whsec_'"
|
||||
)
|
||||
|
||||
try:
|
||||
# Store secret key (encrypted)
|
||||
set_setting(
|
||||
db=db,
|
||||
key='stripe_secret_key',
|
||||
value=request.secret_key,
|
||||
user_id=str(current_user.id),
|
||||
description='Stripe API secret key for payment processing',
|
||||
is_sensitive=True,
|
||||
encrypt=True
|
||||
)
|
||||
|
||||
# Store webhook secret (encrypted)
|
||||
set_setting(
|
||||
db=db,
|
||||
key='stripe_webhook_secret',
|
||||
value=request.webhook_secret,
|
||||
user_id=str(current_user.id),
|
||||
description='Stripe webhook secret for verifying webhook signatures',
|
||||
is_sensitive=True,
|
||||
encrypt=True
|
||||
)
|
||||
|
||||
# Determine environment
|
||||
environment = 'test' if request.secret_key.startswith('sk_test_') else 'live'
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Stripe settings updated successfully",
|
||||
"environment": environment,
|
||||
"updated_at": datetime.now(timezone.utc).isoformat(),
|
||||
"updated_by": f"{current_user.first_name} {current_user.last_name}"
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to update Stripe settings: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
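End to end, a superadmin can now rotate Stripe credentials at runtime. A sketch assuming requests, with placeholder URL, keys, and token:

import requests  # assumed dependency

base = "http://localhost:8000/api/admin/settings/stripe"  # placeholder URL
headers = {"Authorization": "Bearer <superadmin-jwt>"}

resp = requests.put(base, headers=headers, json={
    "secret_key": "sk_test_xxx",      # must start with sk_test_ or sk_live_
    "webhook_secret": "whsec_xxx",    # must start with whsec_
})
print(resp.json())  # {"success": True, "environment": "test", ...}

# Confirm the stored key actually authenticates against Stripe
resp = requests.post(f"{base}/test-connection", headers=headers)
print(resp.status_code, resp.json().get("message"))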
 # Include the router in the main app
 app.include_router(api_router)
 
+# ============================================================================
+# MIDDLEWARE CONFIGURATION
+# ============================================================================
+# IMPORTANT: In FastAPI, middleware is executed in REVERSE order of addition
+# Last added = First executed
+# So we add them in this order: Security Headers -> CORS
+# Execution order will be: CORS -> Security Headers
+
+# Security Headers Middleware (Added first, executes second)
+@app.middleware("http")
+async def add_security_headers(request: Request, call_next):
+    response = await call_next(request)
+
+    # Security headers to protect against common vulnerabilities
+    security_headers = {
+        # Prevent clickjacking attacks
+        "X-Frame-Options": "DENY",
+
+        # Prevent MIME type sniffing
+        "X-Content-Type-Options": "nosniff",
+
+        # Enable XSS protection in older browsers
+        "X-XSS-Protection": "1; mode=block",
+
+        # Control referrer information
+        "Referrer-Policy": "strict-origin-when-cross-origin",
+
+        # Permissions policy (formerly Feature-Policy)
+        "Permissions-Policy": "geolocation=(), microphone=(), camera=()",
+    }
+
+    # Add HSTS header in production (force HTTPS)
+    if IS_PRODUCTION:
+        security_headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains"
+
+    # Apply all security headers
+    for header, value in security_headers.items():
+        response.headers[header] = value
+
+    # Remove server identification headers (use del, not pop for MutableHeaders)
+    if "Server" in response.headers:
+        del response.headers["Server"]
+
+    return response
+
+print(f"✓ Security headers configured (Production: {IS_PRODUCTION})")
+
+# CORS Configuration (Added second, executes first)
+cors_origins = os.environ.get('CORS_ORIGINS', '')
+if cors_origins:
+    # Use explicitly configured origins
+    allowed_origins = [origin.strip() for origin in cors_origins.split(',')]
+else:
+    # Default to common development origins if not configured
+    allowed_origins = [
+        "http://localhost:3000",
+        "http://localhost:8000",
+        "http://127.0.0.1:3000",
+        "http://127.0.0.1:8000"
+    ]
+    print(f"⚠️ WARNING: CORS_ORIGINS not set. Using defaults: {allowed_origins}")
+    print("⚠️ For production, set CORS_ORIGINS in .env file!")
+
+print(f"✓ CORS allowed origins: {allowed_origins}")
+
 app.add_middleware(
     CORSMiddleware,
     allow_credentials=True,
-    allow_origins=os.environ.get('CORS_ORIGINS', '*').split(','),
-    allow_methods=["*"],
+    allow_origins=allowed_origins,
+    allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS", "PATCH"],
     allow_headers=["*"],
+    expose_headers=["*"],
+    max_age=600,  # Cache preflight requests for 10 minutes
 )
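The reverse execution order noted in the comments can be confirmed empirically. A self-contained sketch, assuming fastapi and its TestClient:

from fastapi import FastAPI, Request
from fastapi.testclient import TestClient

app = FastAPI()
order = []

@app.middleware("http")            # added first -> runs second
async def first_added(request: Request, call_next):
    order.append("first_added")
    return await call_next(request)

@app.middleware("http")            # added second -> runs first
async def second_added(request: Request, call_next):
    order.append("second_added")
    return await call_next(request)

@app.get("/")
async def root():
    return {"ok": True}

TestClient(app).get("/")
print(order)  # ['second_added', 'first_added']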