Compare commits
8 commits: dev...661a4cbb7c

| Author | SHA1 | Date |
|---|---|---|
|  | 661a4cbb7c |  |
|  | a01a8b9915 |  |
|  | e126cb988c |  |
|  | fd988241a1 |  |
|  | c28eddca67 |  |
|  | e20542ccdc |  |
|  | b3f1f5f789 |  |
|  | 1da045f73f |  |
@@ -1,83 +0,0 @@
-# Git
-.git
-.gitignore
-
-# Python
-__pycache__
-*.py[cod]
-*$py.class
-*.so
-.Python
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-
-# Virtual environments
-venv/
-ENV/
-env/
-.venv/
-
-# IDE
-.idea/
-.vscode/
-*.swp
-*.swo
-*~
-
-# Testing
-.pytest_cache/
-.coverage
-htmlcov/
-.tox/
-.nox/
-
-# Environment files (will be mounted or passed via env vars)
-.env
-.env.local
-.env.*.local
-*.env
-
-# Logs
-*.log
-logs/
-
-# Database
-*.db
-*.sqlite3
-
-# Alembic
-alembic/versions/__pycache__/
-
-# Docker
-Dockerfile
-docker-compose*.yml
-.docker/
-
-# Documentation
-*.md
-docs/
-
-# Temporary files
-tmp/
-temp/
-*.tmp
-
-# OS files
-.DS_Store
-Thumbs.db
-
-# Uploads (will be mounted as volume)
-uploads/
13 .env.example
@@ -6,10 +6,6 @@ JWT_SECRET=your-secret-key-change-this-in-production
 JWT_ALGORITHM=HS256
 ACCESS_TOKEN_EXPIRE_MINUTES=30
-
-# Settings Encryption (for database-stored sensitive settings)
-# Generate with: python -c "import secrets; print(secrets.token_urlsafe(64))"
-SETTINGS_ENCRYPTION_KEY=your-encryption-key-generate-with-command-above
 
 # SMTP Email Configuration (Port 465 - SSL/TLS)
 SMTP_HOST=p.konceptkit.com
 SMTP_PORT=465
@@ -32,14 +28,7 @@ SMTP_FROM_NAME=LOAF Membership
 # Frontend URL
 FRONTEND_URL=http://localhost:3000
 
-# Backend URL (for webhook URLs and API references)
-# Used to construct Stripe webhook URL shown in Admin Settings
-BACKEND_URL=http://localhost:8000
-
-# Stripe Configuration (NOW DATABASE-DRIVEN via Admin Settings page)
-# Configure Stripe credentials through the Admin Settings UI (requires SETTINGS_ENCRYPTION_KEY)
-# No longer requires .env variables - managed through database for dynamic updates
-# Legacy .env variables below are deprecated:
+# Stripe Configuration (for future payment integration)
 # STRIPE_SECRET_KEY=sk_test_...
 # STRIPE_WEBHOOK_SECRET=whsec_...
 
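The removed SETTINGS_ENCRYPTION_KEY block documents its own generation command. Restated as a standalone Python snippet for convenience (this only generates the random token; how the backend turns it into an encryption key is not visible in this diff):

import secrets

# Same recipe as the removed .env.example comment:
#   python -c "import secrets; print(secrets.token_urlsafe(64))"
print(secrets.token_urlsafe(64))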
3 .gitignore (vendored)
@@ -245,9 +245,6 @@ temp_uploads/
 tmp/
 temporary/
 
-# Generated SQL files (from scripts)
-create_superadmin.sql
-
 # CSV imports
 imports/*.csv
 !imports/.gitkeep
40 Dockerfile
@@ -1,40 +0,0 @@
-# Backend Dockerfile - FastAPI with Python
-FROM python:3.11-slim
-
-# Set environment variables
-ENV PYTHONDONTWRITEBYTECODE=1
-ENV PYTHONUNBUFFERED=1
-ENV PYTHONPATH=/app
-
-# Set work directory
-WORKDIR /app
-
-# Install system dependencies
-RUN apt-get update && apt-get install -y \
-    gcc \
-    libpq-dev \
-    curl \
-    && rm -rf /var/lib/apt/lists/*
-
-# Install Python dependencies
-COPY requirements.txt .
-RUN pip install --no-cache-dir --upgrade pip && \
-    pip install --no-cache-dir -r requirements.txt
-
-# Copy application code
-COPY . .
-
-# Create non-root user for security
-RUN adduser --disabled-password --gecos '' appuser && \
-    chown -R appuser:appuser /app
-USER appuser
-
-# Expose port
-EXPOSE 8000
-
-# Health check
-HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
-    CMD curl -f http://localhost:8000/health || exit 1
-
-# Run the application
-CMD ["uvicorn", "server:app", "--host", "0.0.0.0", "--port", "8000"]
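The deleted HEALTHCHECK shells out to curl -f against /health every 30 seconds. To exercise the same endpoint from the host, a minimal sketch, assuming the FastAPI app really does serve GET /health on port 8000 as the Dockerfile implies:

import urllib.request

# Any HTTP error status raises urllib.error.HTTPError, roughly
# mirroring curl -f's non-zero exit on failure.
with urllib.request.urlopen("http://localhost:8000/health", timeout=10) as resp:
    print(resp.status, resp.read().decode())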
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,141 +0,0 @@
-#!/usr/bin/env python3
-"""
-Add Directory Permissions Script
-
-This script adds the new directory.view and directory.manage permissions
-without clearing existing permissions.
-
-Usage:
-    python add_directory_permissions.py
-"""
-
-import os
-import sys
-from sqlalchemy import create_engine, text
-from sqlalchemy.orm import sessionmaker
-from database import Base
-from models import Permission, RolePermission, Role, UserRole
-from dotenv import load_dotenv
-
-# Load environment variables
-load_dotenv()
-
-# Database connection
-DATABASE_URL = os.getenv("DATABASE_URL")
-if not DATABASE_URL:
-    print("Error: DATABASE_URL environment variable not set")
-    sys.exit(1)
-
-engine = create_engine(DATABASE_URL)
-SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
-
-# New directory permissions
-NEW_PERMISSIONS = [
-    {"code": "directory.view", "name": "View Directory Settings", "description": "View member directory field configuration", "module": "directory"},
-    {"code": "directory.manage", "name": "Manage Directory Fields", "description": "Enable/disable directory fields shown in Profile and Directory pages", "module": "directory"},
-]
-
-# Roles that should have these permissions
-ROLE_PERMISSION_MAP = {
-    "directory.view": ["admin", "superadmin"],
-    "directory.manage": ["admin", "superadmin"],
-}
-
-
-def add_directory_permissions():
-    """Add directory permissions and assign to appropriate roles"""
-    db = SessionLocal()
-
-    try:
-        print("=" * 60)
-        print("Adding Directory Permissions")
-        print("=" * 60)
-
-        # Step 1: Add permissions if they don't exist
-        print("\n1. Adding permissions...")
-        permission_map = {}
-
-        for perm_data in NEW_PERMISSIONS:
-            existing = db.query(Permission).filter(Permission.code == perm_data["code"]).first()
-            if existing:
-                print(f" - {perm_data['code']}: Already exists")
-                permission_map[perm_data["code"]] = existing
-            else:
-                permission = Permission(
-                    code=perm_data["code"],
-                    name=perm_data["name"],
-                    description=perm_data["description"],
-                    module=perm_data["module"]
-                )
-                db.add(permission)
-                db.flush()  # Get the ID
-                permission_map[perm_data["code"]] = permission
-                print(f" - {perm_data['code']}: Created")
-
-        db.commit()
-
-        # Step 2: Get roles
-        print("\n2. Fetching roles...")
-        roles = db.query(Role).all()
-        role_map = {role.code: role for role in roles}
-        print(f" Found {len(roles)} roles: {', '.join(role_map.keys())}")
-
-        # Enum mapping for backward compatibility
-        role_enum_map = {
-            'guest': UserRole.guest,
-            'member': UserRole.member,
-            'admin': UserRole.admin,
-            'superadmin': UserRole.superadmin,
-            'finance': UserRole.finance
-        }
-
-        # Step 3: Assign permissions to roles
-        print("\n3. Assigning permissions to roles...")
-        for perm_code, role_codes in ROLE_PERMISSION_MAP.items():
-            permission = permission_map.get(perm_code)
-            if not permission:
-                print(f" Warning: Permission {perm_code} not found")
-                continue
-
-            for role_code in role_codes:
-                role = role_map.get(role_code)
-                if not role:
-                    print(f" Warning: Role {role_code} not found")
-                    continue
-
-                # Check if mapping already exists
-                existing_mapping = db.query(RolePermission).filter(
-                    RolePermission.role_id == role.id,
-                    RolePermission.permission_id == permission.id
-                ).first()
-
-                if existing_mapping:
-                    print(f" - {role_code} -> {perm_code}: Already assigned")
-                else:
-                    role_enum = role_enum_map.get(role_code, UserRole.guest)
-                    mapping = RolePermission(
-                        role=role_enum,
-                        role_id=role.id,
-                        permission_id=permission.id
-                    )
-                    db.add(mapping)
-                    print(f" - {role_code} -> {perm_code}: Assigned")
-
-        db.commit()
-
-        print("\n" + "=" * 60)
-        print("Directory permissions added successfully!")
-        print("=" * 60)
-
-    except Exception as e:
-        db.rollback()
-        print(f"\nError: {str(e)}")
-        import traceback
-        traceback.print_exc()
-        raise
-    finally:
-        db.close()
-
-
-if __name__ == "__main__":
-    add_directory_permissions()
@@ -1,141 +0,0 @@
-#!/usr/bin/env python3
-"""
-Add Registration Permissions Script
-
-This script adds the new registration.view and registration.manage permissions
-without clearing existing permissions.
-
-Usage:
-    python add_registration_permissions.py
-"""
-
-import os
-import sys
-from sqlalchemy import create_engine, text
-from sqlalchemy.orm import sessionmaker
-from database import Base
-from models import Permission, RolePermission, Role, UserRole
-from dotenv import load_dotenv
-
-# Load environment variables
-load_dotenv()
-
-# Database connection
-DATABASE_URL = os.getenv("DATABASE_URL")
-if not DATABASE_URL:
-    print("Error: DATABASE_URL environment variable not set")
-    sys.exit(1)
-
-engine = create_engine(DATABASE_URL)
-SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
-
-# New registration permissions
-NEW_PERMISSIONS = [
-    {"code": "registration.view", "name": "View Registration Settings", "description": "View registration form schema and settings", "module": "registration"},
-    {"code": "registration.manage", "name": "Manage Registration Form", "description": "Edit registration form schema, steps, and fields", "module": "registration"},
-]
-
-# Roles that should have these permissions
-ROLE_PERMISSION_MAP = {
-    "registration.view": ["admin", "superadmin"],
-    "registration.manage": ["admin", "superadmin"],
-}
-
-
-def add_registration_permissions():
-    """Add registration permissions and assign to appropriate roles"""
-    db = SessionLocal()
-
-    try:
-        print("=" * 60)
-        print("Adding Registration Permissions")
-        print("=" * 60)
-
-        # Step 1: Add permissions if they don't exist
-        print("\n1. Adding permissions...")
-        permission_map = {}
-
-        for perm_data in NEW_PERMISSIONS:
-            existing = db.query(Permission).filter(Permission.code == perm_data["code"]).first()
-            if existing:
-                print(f" - {perm_data['code']}: Already exists")
-                permission_map[perm_data["code"]] = existing
-            else:
-                permission = Permission(
-                    code=perm_data["code"],
-                    name=perm_data["name"],
-                    description=perm_data["description"],
-                    module=perm_data["module"]
-                )
-                db.add(permission)
-                db.flush()  # Get the ID
-                permission_map[perm_data["code"]] = permission
-                print(f" - {perm_data['code']}: Created")
-
-        db.commit()
-
-        # Step 2: Get roles
-        print("\n2. Fetching roles...")
-        roles = db.query(Role).all()
-        role_map = {role.code: role for role in roles}
-        print(f" Found {len(roles)} roles: {', '.join(role_map.keys())}")
-
-        # Enum mapping for backward compatibility
-        role_enum_map = {
-            'guest': UserRole.guest,
-            'member': UserRole.member,
-            'admin': UserRole.admin,
-            'superadmin': UserRole.superadmin,
-            'finance': UserRole.finance
-        }
-
-        # Step 3: Assign permissions to roles
-        print("\n3. Assigning permissions to roles...")
-        for perm_code, role_codes in ROLE_PERMISSION_MAP.items():
-            permission = permission_map.get(perm_code)
-            if not permission:
-                print(f" Warning: Permission {perm_code} not found")
-                continue
-
-            for role_code in role_codes:
-                role = role_map.get(role_code)
-                if not role:
-                    print(f" Warning: Role {role_code} not found")
-                    continue
-
-                # Check if mapping already exists
-                existing_mapping = db.query(RolePermission).filter(
-                    RolePermission.role_id == role.id,
-                    RolePermission.permission_id == permission.id
-                ).first()
-
-                if existing_mapping:
-                    print(f" - {role_code} -> {perm_code}: Already assigned")
-                else:
-                    role_enum = role_enum_map.get(role_code, UserRole.guest)
-                    mapping = RolePermission(
-                        role=role_enum,
-                        role_id=role.id,
-                        permission_id=permission.id
-                    )
-                    db.add(mapping)
-                    print(f" - {role_code} -> {perm_code}: Assigned")
-
-        db.commit()
-
-        print("\n" + "=" * 60)
-        print("Registration permissions added successfully!")
-        print("=" * 60)
-
-    except Exception as e:
-        db.rollback()
-        print(f"\nError: {str(e)}")
-        import traceback
-        traceback.print_exc()
-        raise
-    finally:
-        db.close()
-
-
-if __name__ == "__main__":
-    add_registration_permissions()
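The two deleted seed scripts above are line-for-line identical apart from the permission definitions. Had they been kept, the shared get-or-create flow could be factored into one helper — a sketch under that assumption (seed_permissions is a hypothetical name, and the legacy role enum column the originals set on RolePermission is omitted for brevity):

from typing import Dict, List

from models import Permission, Role, RolePermission  # same models the scripts import

def seed_permissions(db, permissions: List[dict], role_permission_map: Dict[str, List[str]]) -> None:
    """Idempotently create permissions, then assign them to the mapped roles."""
    permission_map = {}
    for perm_data in permissions:
        existing = db.query(Permission).filter(Permission.code == perm_data["code"]).first()
        permission = existing or Permission(**perm_data)
        if existing is None:
            db.add(permission)
        permission_map[perm_data["code"]] = permission
    db.flush()  # ensure newly created permissions have IDs

    role_map = {role.code: role for role in db.query(Role).all()}
    for perm_code, role_codes in role_permission_map.items():
        permission = permission_map[perm_code]
        for role_code in role_codes:
            role = role_map.get(role_code)
            if role is None:
                continue  # unknown role; the original scripts print a warning here
            already = db.query(RolePermission).filter(
                RolePermission.role_id == role.id,
                RolePermission.permission_id == permission.id,
            ).first()
            if already is None:
                db.add(RolePermission(role_id=role.id, permission_id=permission.id))
    db.commit()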
@@ -24,48 +24,31 @@ depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
-    """Add missing user fields (skip if already exists)"""
-    from sqlalchemy import inspect
-
-    conn = op.get_bind()
-    inspector = inspect(conn)
-    existing_columns = {col['name'] for col in inspector.get_columns('users')}
+    """Add missing user fields"""
 
     # Add scholarship_reason
-    if 'scholarship_reason' not in existing_columns:
-        op.add_column('users', sa.Column('scholarship_reason', sa.Text(), nullable=True))
+    op.add_column('users', sa.Column('scholarship_reason', sa.Text(), nullable=True))
 
     # Add directory fields
-    if 'directory_email' not in existing_columns:
-        op.add_column('users', sa.Column('directory_email', sa.String(), nullable=True))
-    if 'directory_bio' not in existing_columns:
-        op.add_column('users', sa.Column('directory_bio', sa.Text(), nullable=True))
-    if 'directory_address' not in existing_columns:
-        op.add_column('users', sa.Column('directory_address', sa.String(), nullable=True))
-    if 'directory_phone' not in existing_columns:
-        op.add_column('users', sa.Column('directory_phone', sa.String(), nullable=True))
-    if 'directory_dob' not in existing_columns:
-        op.add_column('users', sa.Column('directory_dob', sa.DateTime(), nullable=True))
-    if 'directory_partner_name' not in existing_columns:
-        op.add_column('users', sa.Column('directory_partner_name', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('directory_email', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('directory_bio', sa.Text(), nullable=True))
+    op.add_column('users', sa.Column('directory_address', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('directory_phone', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('directory_dob', sa.DateTime(), nullable=True))
+    op.add_column('users', sa.Column('directory_partner_name', sa.String(), nullable=True))
 
-    # Rename profile_image_url to profile_photo_url (skip if already renamed)
-    if 'profile_image_url' in existing_columns and 'profile_photo_url' not in existing_columns:
-        op.alter_column('users', 'profile_image_url', new_column_name='profile_photo_url')
+    # Rename profile_image_url to profile_photo_url (for consistency with models.py)
+    op.alter_column('users', 'profile_image_url', new_column_name='profile_photo_url')
 
     # Add social media fields
-    if 'social_media_facebook' not in existing_columns:
-        op.add_column('users', sa.Column('social_media_facebook', sa.String(), nullable=True))
-    if 'social_media_instagram' not in existing_columns:
-        op.add_column('users', sa.Column('social_media_instagram', sa.String(), nullable=True))
-    if 'social_media_twitter' not in existing_columns:
-        op.add_column('users', sa.Column('social_media_twitter', sa.String(), nullable=True))
-    if 'social_media_linkedin' not in existing_columns:
-        op.add_column('users', sa.Column('social_media_linkedin', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('social_media_facebook', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('social_media_instagram', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('social_media_twitter', sa.String(), nullable=True))
+    op.add_column('users', sa.Column('social_media_linkedin', sa.String(), nullable=True))
 
-    # Add email_verification_expires if missing
-    if 'email_verification_expires' not in existing_columns:
-        op.add_column('users', sa.Column('email_verification_expires', sa.DateTime(), nullable=True))
+    # Add email_verification_expires (exists in DB but not in models.py initially)
+    # Check if it already exists, if not add it
+    # This field should already exist from the initial schema, but adding for completeness
 
 
 def downgrade() -> None:
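The minus side of this hunk (and of the hunks that follow) wraps every op.add_column in an inspector check so the migration can be re-run against a database that already has the column. That guard, factored into a standalone helper — a sketch; the helper name is mine, and it must run inside an Alembic migration context for op.get_bind() to work:

from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect

def add_column_if_missing(table_name: str, column: sa.Column) -> None:
    """Add a column only when it is absent, making the migration safely re-runnable."""
    inspector = inspect(op.get_bind())
    existing = {col["name"] for col in inspector.get_columns(table_name)}
    if column.name not in existing:
        op.add_column(table_name, column)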
@@ -22,23 +22,10 @@ depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
-    """Add optional pre-filled information fields to user_invitations (skip if already exists)"""
-    from sqlalchemy import inspect
-
-    conn = op.get_bind()
-    inspector = inspect(conn)
-    existing_columns = {col['name'] for col in inspector.get_columns('user_invitations')}
-
-    # Add first_name if missing
-    if 'first_name' not in existing_columns:
-        op.add_column('user_invitations', sa.Column('first_name', sa.String(), nullable=True))
+    """Add optional pre-filled information fields to user_invitations"""
+    op.add_column('user_invitations', sa.Column('first_name', sa.String(), nullable=True))
 
-    # Add last_name if missing
-    if 'last_name' not in existing_columns:
-        op.add_column('user_invitations', sa.Column('last_name', sa.String(), nullable=True))
+    op.add_column('user_invitations', sa.Column('last_name', sa.String(), nullable=True))
 
-    # Add phone if missing
-    if 'phone' not in existing_columns:
-        op.add_column('user_invitations', sa.Column('phone', sa.String(), nullable=True))
+    op.add_column('user_invitations', sa.Column('phone', sa.String(), nullable=True))
 
 
@@ -22,25 +22,15 @@ depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
-    """Add file_size_bytes column to document tables (skip if already exists)"""
-    from sqlalchemy import inspect
-
-    conn = op.get_bind()
-    inspector = inspect(conn)
+    """Add file_size_bytes column to document tables"""
 
-    # Add to newsletter_archives if missing
-    existing_columns = {col['name'] for col in inspector.get_columns('newsletter_archives')}
-    if 'file_size_bytes' not in existing_columns:
-        op.add_column('newsletter_archives', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
+    # Add to newsletter_archives
+    op.add_column('newsletter_archives', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
 
-    # Add to financial_reports if missing
-    existing_columns = {col['name'] for col in inspector.get_columns('financial_reports')}
-    if 'file_size_bytes' not in existing_columns:
-        op.add_column('financial_reports', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
+    # Add to financial_reports
+    op.add_column('financial_reports', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
 
-    # Add to bylaws_documents if missing
-    existing_columns = {col['name'] for col in inspector.get_columns('bylaws_documents')}
-    if 'file_size_bytes' not in existing_columns:
-        op.add_column('bylaws_documents', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
+    # Add to bylaws_documents
+    op.add_column('bylaws_documents', sa.Column('file_size_bytes', sa.Integer(), nullable=True))
 
 
@@ -22,43 +22,25 @@ depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
-    """Add missing columns and fix naming (skip if already exists)"""
-    from sqlalchemy import inspect
+    """Add missing columns and fix naming"""
 
-    conn = op.get_bind()
-    inspector = inspect(conn)
-
-    # Check existing columns in subscriptions table
-    existing_columns = {col['name'] for col in inspector.get_columns('subscriptions')}
-
-    # Add missing columns to subscriptions table only if they don't exist
-    if 'start_date' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('start_date', sa.DateTime(timezone=True), nullable=True))
-    if 'end_date' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('end_date', sa.DateTime(timezone=True), nullable=True))
-    if 'amount_paid_cents' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('amount_paid_cents', sa.Integer(), nullable=True))
-    if 'manual_payment_notes' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('manual_payment_notes', sa.Text(), nullable=True))
-    if 'manual_payment_admin_id' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('manual_payment_admin_id', UUID(as_uuid=True), nullable=True))
-    if 'manual_payment_date' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('manual_payment_date', sa.DateTime(timezone=True), nullable=True))
-    if 'payment_method' not in existing_columns:
-        op.add_column('subscriptions', sa.Column('payment_method', sa.String(50), nullable=True))
+    # Add missing columns to subscriptions table
+    op.add_column('subscriptions', sa.Column('start_date', sa.DateTime(timezone=True), nullable=True))
+    op.add_column('subscriptions', sa.Column('end_date', sa.DateTime(timezone=True), nullable=True))
+    op.add_column('subscriptions', sa.Column('amount_paid_cents', sa.Integer(), nullable=True))
+    op.add_column('subscriptions', sa.Column('manual_payment_notes', sa.Text(), nullable=True))
+    op.add_column('subscriptions', sa.Column('manual_payment_admin_id', UUID(as_uuid=True), nullable=True))
+    op.add_column('subscriptions', sa.Column('manual_payment_date', sa.DateTime(timezone=True), nullable=True))
+    op.add_column('subscriptions', sa.Column('payment_method', sa.String(50), nullable=True))
 
-    # Add foreign key for manual_payment_admin_id if it doesn't exist
-    existing_fks = [fk['name'] for fk in inspector.get_foreign_keys('subscriptions')]
-    if 'subscriptions_manual_payment_admin_id_fkey' not in existing_fks:
-        op.create_foreign_key(
-            'subscriptions_manual_payment_admin_id_fkey',
-            'subscriptions', 'users',
-            ['manual_payment_admin_id'], ['id']
-        )
+    # Add foreign key for manual_payment_admin_id
+    op.create_foreign_key(
+        'subscriptions_manual_payment_admin_id_fkey',
+        'subscriptions', 'users',
+        ['manual_payment_admin_id'], ['id']
+    )
 
-    # Rename storage_usage.last_calculated_at to last_updated (only if needed)
-    storage_columns = {col['name'] for col in inspector.get_columns('storage_usage')}
-    if 'last_calculated_at' in storage_columns and 'last_updated' not in storage_columns:
-        op.alter_column('storage_usage', 'last_calculated_at', new_column_name='last_updated')
+    # Rename storage_usage.last_calculated_at to last_updated
+    op.alter_column('storage_usage', 'last_calculated_at', new_column_name='last_updated')
 
 
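This hunk applies the same guard idea to constraints: the minus side consults inspector.get_foreign_keys before op.create_foreign_key, and checks both column names before a rename. As a reusable predicate — a sketch, with the same migration-context caveat as above:

from alembic import op
from sqlalchemy import inspect

def fk_exists(table_name: str, fk_name: str) -> bool:
    """True when the named foreign key already exists on the table."""
    inspector = inspect(op.get_bind())
    return fk_name in {fk["name"] for fk in inspector.get_foreign_keys(table_name)}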
@@ -20,15 +20,7 @@ depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
-    """Rename is_active to active (skip if already renamed)"""
-    from sqlalchemy import inspect
-
-    conn = op.get_bind()
-    inspector = inspect(conn)
-
-    # Check if rename is needed
-    existing_columns = {col['name'] for col in inspector.get_columns('subscription_plans')}
-    if 'is_active' in existing_columns and 'active' not in existing_columns:
-        op.alter_column('subscription_plans', 'is_active', new_column_name='active')
+    """Rename is_active to active"""
+    op.alter_column('subscription_plans', 'is_active', new_column_name='active')
 
 
@@ -1,65 +0,0 @@
-"""add_subscription_plan_fields
-
-Revision ID: 007_add_sub_fields
-Revises: 006_rename_active
-Create Date: 2026-01-04
-
-Fixes:
-- Add missing columns to subscription_plans table
-  (custom cycle fields, dynamic pricing fields)
-"""
-from typing import Sequence, Union
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = '007_add_sub_fields'
-down_revision: Union[str, None] = '006_rename_active'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    """Add missing columns to subscription_plans (skip if already exists)"""
-    from alembic import op
-    import sqlalchemy as sa
-    from sqlalchemy import inspect
-
-    # Get database connection
-    conn = op.get_bind()
-    inspector = inspect(conn)
-    existing_columns = {col['name'] for col in inspector.get_columns('subscription_plans')}
-
-    # Custom billing cycle fields
-    if 'custom_cycle_enabled' not in existing_columns:
-        op.add_column('subscription_plans', sa.Column('custom_cycle_enabled', sa.Boolean(), nullable=False, server_default='false'))
-    if 'custom_cycle_start_month' not in existing_columns:
-        op.add_column('subscription_plans', sa.Column('custom_cycle_start_month', sa.Integer(), nullable=True))
-    if 'custom_cycle_start_day' not in existing_columns:
-        op.add_column('subscription_plans', sa.Column('custom_cycle_start_day', sa.Integer(), nullable=True))
-    if 'custom_cycle_end_month' not in existing_columns:
-        op.add_column('subscription_plans', sa.Column('custom_cycle_end_month', sa.Integer(), nullable=True))
-    if 'custom_cycle_end_day' not in existing_columns:
-        op.add_column('subscription_plans', sa.Column('custom_cycle_end_day', sa.Integer(), nullable=True))
-
-    # Dynamic pricing fields
-    if 'minimum_price_cents' not in existing_columns:
-        op.add_column('subscription_plans', sa.Column('minimum_price_cents', sa.Integer(), nullable=False, server_default='3000'))
-    if 'suggested_price_cents' not in existing_columns:
-        op.add_column('subscription_plans', sa.Column('suggested_price_cents', sa.Integer(), nullable=True))
-    if 'allow_donation' not in existing_columns:
-        op.add_column('subscription_plans', sa.Column('allow_donation', sa.Boolean(), nullable=False, server_default='true'))
-
-
-def downgrade() -> None:
-    """Remove added columns (rollback)"""
-
-    op.drop_column('subscription_plans', 'allow_donation')
-    op.drop_column('subscription_plans', 'suggested_price_cents')
-    op.drop_column('subscription_plans', 'minimum_price_cents')
-    op.drop_column('subscription_plans', 'custom_cycle_end_day')
-    op.drop_column('subscription_plans', 'custom_cycle_end_month')
-    op.drop_column('subscription_plans', 'custom_cycle_start_day')
-    op.drop_column('subscription_plans', 'custom_cycle_start_month')
-    op.drop_column('subscription_plans', 'custom_cycle_enabled')
@@ -1,55 +0,0 @@
-"""add_donation_columns
-
-Revision ID: 008_add_donations
-Revises: 007_add_sub_fields
-Create Date: 2026-01-04
-
-Fixes:
-- Add missing Stripe payment columns to donations table
-"""
-from typing import Sequence, Union
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy import inspect
-
-
-# revision identifiers, used by Alembic.
-revision: str = '008_add_donations'
-down_revision: Union[str, None] = '007_add_sub_fields'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    """Add missing columns to donations table (skip if already exists)"""
-
-    # Get database connection
-    conn = op.get_bind()
-    inspector = inspect(conn)
-    existing_columns = {col['name'] for col in inspector.get_columns('donations')}
-
-    # Stripe payment columns
-    if 'stripe_checkout_session_id' not in existing_columns:
-        op.add_column('donations', sa.Column('stripe_checkout_session_id', sa.String(), nullable=True))
-
-    if 'stripe_payment_intent_id' not in existing_columns:
-        op.add_column('donations', sa.Column('stripe_payment_intent_id', sa.String(), nullable=True))
-
-    if 'payment_method' not in existing_columns:
-        op.add_column('donations', sa.Column('payment_method', sa.String(), nullable=True))
-
-    if 'notes' not in existing_columns:
-        op.add_column('donations', sa.Column('notes', sa.Text(), nullable=True))
-
-    if 'updated_at' not in existing_columns:
-        op.add_column('donations', sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True))
-
-
-def downgrade() -> None:
-    """Remove added columns (rollback)"""
-
-    op.drop_column('donations', 'updated_at')
-    op.drop_column('donations', 'notes')
-    op.drop_column('donations', 'payment_method')
-    op.drop_column('donations', 'stripe_payment_intent_id')
-    op.drop_column('donations', 'stripe_checkout_session_id')
@@ -1,237 +0,0 @@
-"""add_all_missing_columns
-
-Revision ID: 009_add_all_missing
-Revises: 008_add_donations
-Create Date: 2026-01-04
-
-Fixes:
-- Add ALL remaining missing columns across all tables
-- Users: newsletter preferences, volunteer, scholarship, directory, password reset, ToS, member_since, reminders, rejection, import tracking
-- Events: calendar_uid
-- Subscriptions: base_subscription_cents, donation_cents, manual_payment
-- ImportJobs: WordPress import fields
-- Create ImportRollbackAudit table if not exists
-"""
-from typing import Sequence, Union
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects.postgresql import UUID
-from sqlalchemy import inspect
-
-
-# revision identifiers, used by Alembic.
-revision: str = '009_add_all_missing'
-down_revision: Union[str, None] = '008_add_donations'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    """Add all missing columns across all tables"""
-
-    conn = op.get_bind()
-    inspector = inspect(conn)
-
-    # ============================================================
-    # 1. USERS TABLE - Add ~28 missing columns
-    # ============================================================
-    users_columns = {col['name'] for col in inspector.get_columns('users')}
-
-    # Newsletter publication preferences
-    if 'newsletter_publish_name' not in users_columns:
-        op.add_column('users', sa.Column('newsletter_publish_name', sa.Boolean(), nullable=False, server_default='false'))
-    if 'newsletter_publish_photo' not in users_columns:
-        op.add_column('users', sa.Column('newsletter_publish_photo', sa.Boolean(), nullable=False, server_default='false'))
-    if 'newsletter_publish_birthday' not in users_columns:
-        op.add_column('users', sa.Column('newsletter_publish_birthday', sa.Boolean(), nullable=False, server_default='false'))
-    if 'newsletter_publish_none' not in users_columns:
-        op.add_column('users', sa.Column('newsletter_publish_none', sa.Boolean(), nullable=False, server_default='false'))
-
-    # Volunteer interests
-    if 'volunteer_interests' not in users_columns:
-        op.add_column('users', sa.Column('volunteer_interests', sa.JSON(), nullable=True, server_default='[]'))
-
-    # Scholarship
-    if 'scholarship_requested' not in users_columns:
-        op.add_column('users', sa.Column('scholarship_requested', sa.Boolean(), nullable=False, server_default='false'))
-
-    # Directory
-    if 'show_in_directory' not in users_columns:
-        op.add_column('users', sa.Column('show_in_directory', sa.Boolean(), nullable=False, server_default='false'))
-
-    # Password reset
-    if 'password_reset_token' not in users_columns:
-        op.add_column('users', sa.Column('password_reset_token', sa.String(), nullable=True))
-    if 'password_reset_expires' not in users_columns:
-        op.add_column('users', sa.Column('password_reset_expires', sa.DateTime(), nullable=True))
-    if 'force_password_change' not in users_columns:
-        op.add_column('users', sa.Column('force_password_change', sa.Boolean(), nullable=False, server_default='false'))
-
-    # Terms of Service
-    if 'accepts_tos' not in users_columns:
-        op.add_column('users', sa.Column('accepts_tos', sa.Boolean(), nullable=False, server_default='false'))
-    if 'tos_accepted_at' not in users_columns:
-        op.add_column('users', sa.Column('tos_accepted_at', sa.DateTime(), nullable=True))
-
-    # Member since
-    if 'member_since' not in users_columns:
-        op.add_column('users', sa.Column('member_since', sa.DateTime(), nullable=True))
-
-    # Email verification reminders
-    if 'email_verification_reminders_sent' not in users_columns:
-        op.add_column('users', sa.Column('email_verification_reminders_sent', sa.Integer(), nullable=False, server_default='0'))
-    if 'last_email_verification_reminder_at' not in users_columns:
-        op.add_column('users', sa.Column('last_email_verification_reminder_at', sa.DateTime(), nullable=True))
-
-    # Event attendance reminders
-    if 'event_attendance_reminders_sent' not in users_columns:
-        op.add_column('users', sa.Column('event_attendance_reminders_sent', sa.Integer(), nullable=False, server_default='0'))
-    if 'last_event_attendance_reminder_at' not in users_columns:
-        op.add_column('users', sa.Column('last_event_attendance_reminder_at', sa.DateTime(), nullable=True))
-
-    # Payment reminders
-    if 'payment_reminders_sent' not in users_columns:
-        op.add_column('users', sa.Column('payment_reminders_sent', sa.Integer(), nullable=False, server_default='0'))
-    if 'last_payment_reminder_at' not in users_columns:
-        op.add_column('users', sa.Column('last_payment_reminder_at', sa.DateTime(), nullable=True))
-
-    # Renewal reminders
-    if 'renewal_reminders_sent' not in users_columns:
-        op.add_column('users', sa.Column('renewal_reminders_sent', sa.Integer(), nullable=False, server_default='0'))
-    if 'last_renewal_reminder_at' not in users_columns:
-        op.add_column('users', sa.Column('last_renewal_reminder_at', sa.DateTime(), nullable=True))
-
-    # Rejection tracking
-    if 'rejection_reason' not in users_columns:
-        op.add_column('users', sa.Column('rejection_reason', sa.Text(), nullable=True))
-    if 'rejected_at' not in users_columns:
-        op.add_column('users', sa.Column('rejected_at', sa.DateTime(timezone=True), nullable=True))
-    if 'rejected_by' not in users_columns:
-        op.add_column('users', sa.Column('rejected_by', UUID(as_uuid=True), nullable=True))
-        # Note: Foreign key constraint skipped to avoid circular dependency issues
-
-    # WordPress import tracking
-    if 'import_source' not in users_columns:
-        op.add_column('users', sa.Column('import_source', sa.String(50), nullable=True))
-    if 'import_job_id' not in users_columns:
-        op.add_column('users', sa.Column('import_job_id', UUID(as_uuid=True), nullable=True))
-        # Note: Foreign key will be added after import_jobs table is updated
-    if 'wordpress_user_id' not in users_columns:
-        op.add_column('users', sa.Column('wordpress_user_id', sa.BigInteger(), nullable=True))
-    if 'wordpress_registered_date' not in users_columns:
-        op.add_column('users', sa.Column('wordpress_registered_date', sa.DateTime(timezone=True), nullable=True))
-
-    # ============================================================
-    # 2. EVENTS TABLE - Add calendar_uid
-    # ============================================================
-    events_columns = {col['name'] for col in inspector.get_columns('events')}
-
-    if 'calendar_uid' not in events_columns:
-        op.add_column('events', sa.Column('calendar_uid', sa.String(), nullable=True))
-
-    # ============================================================
-    # 3. SUBSCRIPTIONS TABLE - Add donation tracking
-    # ============================================================
-    subscriptions_columns = {col['name'] for col in inspector.get_columns('subscriptions')}
-
-    if 'base_subscription_cents' not in subscriptions_columns:
-        op.add_column('subscriptions', sa.Column('base_subscription_cents', sa.Integer(), nullable=True))
-        # Update existing rows: base_subscription_cents = amount_paid_cents - donation_cents (default 0)
-        op.execute("UPDATE subscriptions SET base_subscription_cents = COALESCE(amount_paid_cents, 0) WHERE base_subscription_cents IS NULL")
-        # Make it non-nullable after populating
-        op.alter_column('subscriptions', 'base_subscription_cents', nullable=False)
-
-    if 'donation_cents' not in subscriptions_columns:
-        op.add_column('subscriptions', sa.Column('donation_cents', sa.Integer(), nullable=False, server_default='0'))
-
-    if 'manual_payment' not in subscriptions_columns:
-        op.add_column('subscriptions', sa.Column('manual_payment', sa.Boolean(), nullable=False, server_default='false'))
-
-    # ============================================================
-    # 4. IMPORT_JOBS TABLE - Add WordPress import fields
-    # ============================================================
-    import_jobs_columns = {col['name'] for col in inspector.get_columns('import_jobs')}
-
-    if 'field_mapping' not in import_jobs_columns:
-        op.add_column('import_jobs', sa.Column('field_mapping', sa.JSON(), nullable=False, server_default='{}'))
-
-    if 'wordpress_metadata' not in import_jobs_columns:
-        op.add_column('import_jobs', sa.Column('wordpress_metadata', sa.JSON(), nullable=False, server_default='{}'))
-
-    if 'imported_user_ids' not in import_jobs_columns:
-        op.add_column('import_jobs', sa.Column('imported_user_ids', sa.JSON(), nullable=False, server_default='[]'))
-
-    if 'rollback_at' not in import_jobs_columns:
-        op.add_column('import_jobs', sa.Column('rollback_at', sa.DateTime(), nullable=True))
-
-    if 'rollback_by' not in import_jobs_columns:
-        op.add_column('import_jobs', sa.Column('rollback_by', UUID(as_uuid=True), nullable=True))
-        # Foreign key will be added if needed
-
-    # ============================================================
-    # 5. CREATE IMPORT_ROLLBACK_AUDIT TABLE
-    # ============================================================
-    if 'import_rollback_audit' not in inspector.get_table_names():
-        op.create_table(
-            'import_rollback_audit',
-            sa.Column('id', UUID(as_uuid=True), primary_key=True),
-            sa.Column('import_job_id', UUID(as_uuid=True), sa.ForeignKey('import_jobs.id'), nullable=False),
-            sa.Column('rolled_back_by', UUID(as_uuid=True), sa.ForeignKey('users.id'), nullable=False),
-            sa.Column('rolled_back_at', sa.DateTime(), nullable=False),
-            sa.Column('deleted_user_count', sa.Integer(), nullable=False),
-            sa.Column('deleted_user_ids', sa.JSON(), nullable=False),
-            sa.Column('reason', sa.Text(), nullable=True),
-            sa.Column('created_at', sa.DateTime(), nullable=False)
-        )
-
-
-def downgrade() -> None:
-    """Remove all added columns and tables"""
-
-    # Drop import_rollback_audit table
-    op.drop_table('import_rollback_audit')
-
-    # Drop import_jobs columns
-    op.drop_column('import_jobs', 'rollback_by')
-    op.drop_column('import_jobs', 'rollback_at')
-    op.drop_column('import_jobs', 'imported_user_ids')
-    op.drop_column('import_jobs', 'wordpress_metadata')
-    op.drop_column('import_jobs', 'field_mapping')
-
-    # Drop subscriptions columns
-    op.drop_column('subscriptions', 'manual_payment')
-    op.drop_column('subscriptions', 'donation_cents')
-    op.drop_column('subscriptions', 'base_subscription_cents')
-
-    # Drop events columns
-    op.drop_column('events', 'calendar_uid')
-
-    # Drop users columns (in reverse order)
-    op.drop_column('users', 'wordpress_registered_date')
-    op.drop_column('users', 'wordpress_user_id')
-    op.drop_column('users', 'import_job_id')
-    op.drop_column('users', 'import_source')
-    op.drop_column('users', 'rejected_by')
-    op.drop_column('users', 'rejected_at')
-    op.drop_column('users', 'rejection_reason')
-    op.drop_column('users', 'last_renewal_reminder_at')
-    op.drop_column('users', 'renewal_reminders_sent')
-    op.drop_column('users', 'last_payment_reminder_at')
-    op.drop_column('users', 'payment_reminders_sent')
-    op.drop_column('users', 'last_event_attendance_reminder_at')
-    op.drop_column('users', 'event_attendance_reminders_sent')
-    op.drop_column('users', 'last_email_verification_reminder_at')
-    op.drop_column('users', 'email_verification_reminders_sent')
-    op.drop_column('users', 'member_since')
-    op.drop_column('users', 'tos_accepted_at')
-    op.drop_column('users', 'accepts_tos')
-    op.drop_column('users', 'force_password_change')
-    op.drop_column('users', 'password_reset_expires')
-    op.drop_column('users', 'password_reset_token')
-    op.drop_column('users', 'show_in_directory')
-    op.drop_column('users', 'scholarship_requested')
-    op.drop_column('users', 'volunteer_interests')
-    op.drop_column('users', 'newsletter_publish_none')
-    op.drop_column('users', 'newsletter_publish_birthday')
-    op.drop_column('users', 'newsletter_publish_photo')
-    op.drop_column('users', 'newsletter_publish_name')
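Within the deleted migration above, the base_subscription_cents handling is the textbook way to introduce a NOT NULL column to a populated table: add it nullable, backfill existing rows, then tighten the constraint. Isolated here for clarity (the function wrapper is illustrative, not part of the file):

from alembic import op
import sqlalchemy as sa

def add_base_subscription_cents() -> None:
    """Add nullable, backfill from amount_paid_cents, then set NOT NULL."""
    op.add_column('subscriptions', sa.Column('base_subscription_cents', sa.Integer(), nullable=True))
    op.execute(
        "UPDATE subscriptions "
        "SET base_subscription_cents = COALESCE(amount_paid_cents, 0) "
        "WHERE base_subscription_cents IS NULL"
    )
    op.alter_column('subscriptions', 'base_subscription_cents', nullable=False)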
@@ -1,37 +0,0 @@
-"""add_email_verification_expires
-
-Revision ID: 010_add_email_exp
-Revises: 009_add_all_missing
-Create Date: 2026-01-05
-
-Fixes:
-- Add missing email_verification_expires column to users table
-"""
-from typing import Sequence, Union
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = '010_add_email_exp'
-down_revision: Union[str, None] = '009_add_all_missing'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    """Add email_verification_expires column (skip if already exists)"""
-    from sqlalchemy import inspect
-
-    conn = op.get_bind()
-    inspector = inspect(conn)
-    existing_columns = {col['name'] for col in inspector.get_columns('users')}
-
-    # Add email_verification_expires if missing
-    if 'email_verification_expires' not in existing_columns:
-        op.add_column('users', sa.Column('email_verification_expires', sa.DateTime(), nullable=True))
-
-
-def downgrade() -> None:
-    """Remove email_verification_expires column"""
-    op.drop_column('users', 'email_verification_expires')
@@ -1,410 +0,0 @@
-"""align_prod_with_dev
-
-Revision ID: 011_align_prod_dev
-Revises: 010_add_email_exp
-Create Date: 2026-01-05
-
-Aligns PROD database schema with DEV database schema (source of truth).
-Fixes type mismatches, removes PROD-only columns, adds DEV-only columns, updates nullable constraints.
-"""
-from typing import Sequence, Union
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects.postgresql import JSONB, JSON
-
-# revision identifiers, used by Alembic.
-revision: str = '011_align_prod_dev'
-down_revision: Union[str, None] = '010_add_email_exp'
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    """Align PROD schema with DEV schema (source of truth)"""
-    from sqlalchemy import inspect
-
-    conn = op.get_bind()
-    inspector = inspect(conn)
-
-    print("Starting schema alignment: PROD → DEV (source of truth)...")
-
-    # ============================================================
-    # 1. FIX USERS TABLE
-    # ============================================================
-    print("\n[1/14] Fixing users table...")
-
-    users_columns = {col['name'] for col in inspector.get_columns('users')}
-
-    # Remove PROD-only columns (not in models.py or DEV)
-    if 'bio' in users_columns:
-        op.drop_column('users', 'bio')
-        print(" ✓ Removed users.bio (PROD-only)")
-
-    if 'interests' in users_columns:
-        op.drop_column('users', 'interests')
-        print(" ✓ Removed users.interests (PROD-only)")
-
-    try:
-        # Change constrained VARCHAR(n) to unconstrained VARCHAR
-        op.alter_column('users', 'first_name', type_=sa.String(), postgresql_using='first_name::varchar')
-        op.alter_column('users', 'last_name', type_=sa.String(), postgresql_using='last_name::varchar')
-        op.alter_column('users', 'email', type_=sa.String(), postgresql_using='email::varchar')
-        op.alter_column('users', 'phone', type_=sa.String(), postgresql_using='phone::varchar')
-        op.alter_column('users', 'city', type_=sa.String(), postgresql_using='city::varchar')
-        op.alter_column('users', 'state', type_=sa.String(), postgresql_using='state::varchar')
-        op.alter_column('users', 'zipcode', type_=sa.String(), postgresql_using='zipcode::varchar')
-        op.alter_column('users', 'partner_first_name', type_=sa.String(), postgresql_using='partner_first_name::varchar')
-        op.alter_column('users', 'partner_last_name', type_=sa.String(), postgresql_using='partner_last_name::varchar')
-        op.alter_column('users', 'referred_by_member_name', type_=sa.String(), postgresql_using='referred_by_member_name::varchar')
-        op.alter_column('users', 'password_hash', type_=sa.String(), postgresql_using='password_hash::varchar')
-        op.alter_column('users', 'email_verification_token', type_=sa.String(), postgresql_using='email_verification_token::varchar')
-        op.alter_column('users', 'password_reset_token', type_=sa.String(), postgresql_using='password_reset_token::varchar')
-        print(" ✓ Changed VARCHAR(n) to VARCHAR")
-
-        # Change TEXT to VARCHAR
-        op.alter_column('users', 'address', type_=sa.String(), postgresql_using='address::varchar')
-        op.alter_column('users', 'profile_photo_url', type_=sa.String(), postgresql_using='profile_photo_url::varchar')
-        print(" ✓ Changed TEXT to VARCHAR")
-
-        # Change DATE to TIMESTAMP
-        op.alter_column('users', 'date_of_birth', type_=sa.DateTime(), postgresql_using='date_of_birth::timestamp')
-        op.alter_column('users', 'member_since', type_=sa.DateTime(), postgresql_using='member_since::timestamp')
-        print(" ✓ Changed DATE to TIMESTAMP")
-
-        # Change JSONB to JSON
-        op.alter_column('users', 'lead_sources', type_=JSON(), postgresql_using='lead_sources::json')
-        print(" ✓ Changed lead_sources JSONB to JSON")
-
-        # Change TEXT to JSON for volunteer_interests
-        op.alter_column('users', 'volunteer_interests', type_=JSON(), postgresql_using='volunteer_interests::json')
-        print(" ✓ Changed volunteer_interests TEXT to JSON")
-
-    except Exception as e:
-        print(f" ⚠️ Warning: Some type conversions failed: {e}")
-
-    # Fill NULL values with defaults BEFORE setting NOT NULL constraints
-    print(" ⏳ Filling NULL values with defaults...")
-
-    # Update string fields
-    conn.execute(sa.text("UPDATE users SET address = '' WHERE address IS NULL"))
-    conn.execute(sa.text("UPDATE users SET city = '' WHERE city IS NULL"))
-    conn.execute(sa.text("UPDATE users SET state = '' WHERE state IS NULL"))
-    conn.execute(sa.text("UPDATE users SET zipcode = '' WHERE zipcode IS NULL"))
-    conn.execute(sa.text("UPDATE users SET phone = '' WHERE phone IS NULL"))
-
-    # Update date_of_birth with sentinel date
-    conn.execute(sa.text("UPDATE users SET date_of_birth = '1900-01-01'::timestamp WHERE date_of_birth IS NULL"))
-
-    # Update boolean fields
-    conn.execute(sa.text("UPDATE users SET show_in_directory = false WHERE show_in_directory IS NULL"))
-    conn.execute(sa.text("UPDATE users SET newsletter_publish_name = false WHERE newsletter_publish_name IS NULL"))
-    conn.execute(sa.text("UPDATE users SET newsletter_publish_birthday = false WHERE newsletter_publish_birthday IS NULL"))
-    conn.execute(sa.text("UPDATE users SET newsletter_publish_photo = false WHERE newsletter_publish_photo IS NULL"))
-    conn.execute(sa.text("UPDATE users SET newsletter_publish_none = false WHERE newsletter_publish_none IS NULL"))
-    conn.execute(sa.text("UPDATE users SET force_password_change = false WHERE force_password_change IS NULL"))
-    conn.execute(sa.text("UPDATE users SET scholarship_requested = false WHERE scholarship_requested IS NULL"))
-    conn.execute(sa.text("UPDATE users SET accepts_tos = false WHERE accepts_tos IS NULL"))
-
-    # Check how many rows were updated
-    null_check = conn.execute(sa.text("""
-        SELECT
-            COUNT(*) FILTER (WHERE address = '') as address_filled,
-            COUNT(*) FILTER (WHERE date_of_birth = '1900-01-01'::timestamp) as dob_filled
-        FROM users
-    """)).fetchone()
-    print(f" ✓ Filled NULLs: {null_check[0]} addresses, {null_check[1]} dates of birth")
-
-    # Now safe to set NOT NULL constraints
-    op.alter_column('users', 'address', nullable=False)
-    op.alter_column('users', 'city', nullable=False)
-    op.alter_column('users', 'state', nullable=False)
-    op.alter_column('users', 'zipcode', nullable=False)
-    op.alter_column('users', 'phone', nullable=False)
-    op.alter_column('users', 'date_of_birth', nullable=False)
-    op.alter_column('users', 'show_in_directory', nullable=False)
-    op.alter_column('users', 'newsletter_publish_name', nullable=False)
-    op.alter_column('users', 'newsletter_publish_birthday', nullable=False)
-    op.alter_column('users', 'newsletter_publish_photo', nullable=False)
-    op.alter_column('users', 'newsletter_publish_none', nullable=False)
-    op.alter_column('users', 'force_password_change', nullable=False)
-    op.alter_column('users', 'scholarship_requested', nullable=False)
-    op.alter_column('users', 'accepts_tos', nullable=False)
-    print(" ✓ Set NOT NULL constraints")
-
-    # ============================================================
-    # 2. FIX DONATIONS TABLE
-    # ============================================================
-    print("\n[2/14] Fixing donations table...")
-
-    donations_columns = {col['name'] for col in inspector.get_columns('donations')}
-
-    # Remove PROD-only columns
-    if 'is_anonymous' in donations_columns:
-        op.drop_column('donations', 'is_anonymous')
-        print(" ✓ Removed donations.is_anonymous (PROD-only)")
-
-    if 'completed_at' in donations_columns:
-        op.drop_column('donations', 'completed_at')
-        print(" ✓ Removed donations.completed_at (PROD-only)")
-
-    if 'message' in donations_columns:
-        op.drop_column('donations', 'message')
-        print(" ✓ Removed donations.message (PROD-only)")
-
-    try:
-        op.alter_column('donations', 'donor_email', type_=sa.String(), postgresql_using='donor_email::varchar')
-        op.alter_column('donations', 'donor_name', type_=sa.String(), postgresql_using='donor_name::varchar')
-        op.alter_column('donations', 'stripe_payment_intent_id', type_=sa.String(), postgresql_using='stripe_payment_intent_id::varchar')
-        print(" ✓ Changed VARCHAR(n) to VARCHAR")
-    except Exception as e:
-        print(f" ⚠️ Warning: Type conversion failed: {e}")
-
-    # ============================================================
-    # 3. FIX SUBSCRIPTIONS TABLE
-    # ============================================================
-    print("\n[3/14] Fixing subscriptions table...")
-
-    subscriptions_columns = {col['name'] for col in inspector.get_columns('subscriptions')}
-
-    # Remove PROD-only columns
-    if 'cancel_at_period_end' in subscriptions_columns:
-        op.drop_column('subscriptions', 'cancel_at_period_end')
-        print(" ✓ Removed subscriptions.cancel_at_period_end (PROD-only)")
-
-    if 'canceled_at' in subscriptions_columns:
-        op.drop_column('subscriptions', 'canceled_at')
-        print(" ✓ Removed subscriptions.canceled_at (PROD-only)")
-
-    if 'current_period_start' in subscriptions_columns:
-        op.drop_column('subscriptions', 'current_period_start')
-        print(" ✓ Removed subscriptions.current_period_start (PROD-only)")
-
-    if 'current_period_end' in subscriptions_columns:
-        op.drop_column('subscriptions', 'current_period_end')
-        print(" ✓ Removed subscriptions.current_period_end (PROD-only)")
-
-    try:
-        op.alter_column('subscriptions', 'stripe_subscription_id', type_=sa.String(), postgresql_using='stripe_subscription_id::varchar')
-        op.alter_column('subscriptions', 'stripe_customer_id', type_=sa.String(), postgresql_using='stripe_customer_id::varchar')
-        op.alter_column('subscriptions', 'payment_method', type_=sa.String(), postgresql_using='payment_method::varchar')
|
|
||||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
|
||||||
except Exception as e:
|
|
||||||
print(f" ⚠️ Warning: Type conversion failed: {e}")
|
|
||||||
|
|
||||||
# Fix nullable constraints
|
|
||||||
op.alter_column('subscriptions', 'start_date', nullable=False)
|
|
||||||
op.alter_column('subscriptions', 'manual_payment', nullable=False)
|
|
||||||
op.alter_column('subscriptions', 'donation_cents', nullable=False)
|
|
||||||
op.alter_column('subscriptions', 'base_subscription_cents', nullable=False)
|
|
||||||
print(" ✓ Fixed nullable constraints")
|
|
||||||
|
|
||||||
# ============================================================
|
|
||||||
# 4. FIX STORAGE_USAGE TABLE
|
|
||||||
# ============================================================
|
|
||||||
print("\n[4/14] Fixing storage_usage table...")
|
|
||||||
|
|
||||||
storage_columns = {col['name'] for col in inspector.get_columns('storage_usage')}
|
|
||||||
|
|
||||||
# Remove PROD-only columns
|
|
||||||
if 'created_at' in storage_columns:
|
|
||||||
op.drop_column('storage_usage', 'created_at')
|
|
||||||
print(" ✓ Removed storage_usage.created_at (PROD-only)")
|
|
||||||
|
|
||||||
if 'updated_at' in storage_columns:
|
|
||||||
op.drop_column('storage_usage', 'updated_at')
|
|
||||||
print(" ✓ Removed storage_usage.updated_at (PROD-only)")
|
|
||||||
|
|
||||||
op.alter_column('storage_usage', 'max_bytes_allowed', nullable=False)
|
|
||||||
print(" ✓ Fixed nullable constraint")
|
|
||||||
|
|
||||||
# ============================================================
|
|
||||||
# 5. FIX EVENT_GALLERIES TABLE (Add missing DEV columns)
|
|
||||||
# ============================================================
|
|
||||||
print("\n[5/14] Fixing event_galleries table...")
|
|
||||||
|
|
||||||
event_galleries_columns = {col['name'] for col in inspector.get_columns('event_galleries')}
|
|
||||||
|
|
||||||
# Add DEV-only columns (exist in models.py but not in PROD)
|
|
||||||
if 'image_key' not in event_galleries_columns:
|
|
||||||
op.add_column('event_galleries', sa.Column('image_key', sa.String(), nullable=False, server_default=''))
|
|
||||||
print(" ✓ Added event_galleries.image_key")
|
|
||||||
|
|
||||||
if 'file_size_bytes' not in event_galleries_columns:
|
|
||||||
op.add_column('event_galleries', sa.Column('file_size_bytes', sa.Integer(), nullable=False, server_default='0'))
|
|
||||||
print(" ✓ Added event_galleries.file_size_bytes")
|
|
||||||
|
|
||||||
try:
|
|
||||||
op.alter_column('event_galleries', 'image_url', type_=sa.String(), postgresql_using='image_url::varchar')
|
|
||||||
print(" ✓ Changed TEXT to VARCHAR")
|
|
||||||
except Exception as e:
|
|
||||||
print(f" ⚠️ Warning: Type conversion failed: {e}")
|
|
||||||
|
|
||||||
# Note: uploaded_by column already has correct nullable=False in both DEV and PROD
|
|
||||||
|
|
||||||
# ============================================================
|
|
||||||
# 6. FIX BYLAWS_DOCUMENTS TABLE
|
|
||||||
# ============================================================
|
|
||||||
print("\n[6/14] Fixing bylaws_documents table...")
|
|
||||||
|
|
||||||
bylaws_columns = {col['name'] for col in inspector.get_columns('bylaws_documents')}
|
|
||||||
|
|
||||||
# Remove PROD-only column
|
|
||||||
if 'updated_at' in bylaws_columns:
|
|
||||||
op.drop_column('bylaws_documents', 'updated_at')
|
|
||||||
print(" ✓ Removed bylaws_documents.updated_at (PROD-only)")
|
|
||||||
|
|
||||||
try:
|
|
||||||
op.alter_column('bylaws_documents', 'title', type_=sa.String(), postgresql_using='title::varchar')
|
|
||||||
op.alter_column('bylaws_documents', 'version', type_=sa.String(), postgresql_using='version::varchar')
|
|
||||||
op.alter_column('bylaws_documents', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
|
|
||||||
op.alter_column('bylaws_documents', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
|
|
||||||
print(" ✓ Changed column types")
|
|
||||||
except Exception as e:
|
|
||||||
print(f" ⚠️ Warning: Type conversion failed: {e}")
|
|
||||||
|
|
||||||
op.alter_column('bylaws_documents', 'document_type', nullable=True)
|
|
||||||
print(" ✓ Fixed nullable constraint")
|
|
||||||
|
|
||||||
# ============================================================
|
|
||||||
# 7. FIX EVENTS TABLE
|
|
||||||
# ============================================================
|
|
||||||
print("\n[7/14] Fixing events table...")
|
|
||||||
|
|
||||||
try:
|
|
||||||
op.alter_column('events', 'title', type_=sa.String(), postgresql_using='title::varchar')
|
|
||||||
op.alter_column('events', 'location', type_=sa.String(), postgresql_using='location::varchar')
|
|
||||||
op.alter_column('events', 'calendar_uid', type_=sa.String(), postgresql_using='calendar_uid::varchar')
|
|
||||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
|
||||||
except Exception as e:
|
|
||||||
print(f" ⚠️ Warning: {e}")
|
|
||||||
|
|
||||||
op.alter_column('events', 'location', nullable=False)
|
|
||||||
op.alter_column('events', 'created_by', nullable=False)
|
|
||||||
print(" ✓ Fixed nullable constraints")
|
|
||||||
|
|
||||||
# ============================================================
|
|
||||||
# 8. FIX PERMISSIONS TABLE
|
|
||||||
# ============================================================
|
|
||||||
print("\n[8/14] Fixing permissions table...")
|
|
||||||
|
|
||||||
try:
|
|
||||||
op.alter_column('permissions', 'code', type_=sa.String(), postgresql_using='code::varchar')
|
|
||||||
op.alter_column('permissions', 'name', type_=sa.String(), postgresql_using='name::varchar')
|
|
||||||
op.alter_column('permissions', 'module', type_=sa.String(), postgresql_using='module::varchar')
|
|
||||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
|
||||||
except Exception as e:
|
|
||||||
print(f" ⚠️ Warning: {e}")
|
|
||||||
|
|
||||||
op.alter_column('permissions', 'module', nullable=False)
|
|
||||||
print(" ✓ Fixed nullable constraint")
|
|
||||||
|
|
||||||
# ============================================================
|
|
||||||
# 9. FIX ROLES TABLE
|
|
||||||
# ============================================================
|
|
||||||
print("\n[9/14] Fixing roles table...")
|
|
||||||
|
|
||||||
try:
|
|
||||||
op.alter_column('roles', 'code', type_=sa.String(), postgresql_using='code::varchar')
|
|
||||||
op.alter_column('roles', 'name', type_=sa.String(), postgresql_using='name::varchar')
|
|
||||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
|
||||||
except Exception as e:
|
|
||||||
print(f" ⚠️ Warning: {e}")
|
|
||||||
|
|
||||||
op.alter_column('roles', 'is_system_role', nullable=False)
|
|
||||||
print(" ✓ Fixed nullable constraint")
|
|
||||||
|
|
||||||
# ============================================================
|
|
||||||
# 10. FIX USER_INVITATIONS TABLE
|
|
||||||
# ============================================================
|
|
||||||
print("\n[10/14] Fixing user_invitations table...")
|
|
||||||
|
|
||||||
try:
|
|
||||||
op.alter_column('user_invitations', 'email', type_=sa.String(), postgresql_using='email::varchar')
|
|
||||||
op.alter_column('user_invitations', 'token', type_=sa.String(), postgresql_using='token::varchar')
|
|
||||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
|
||||||
except Exception as e:
|
|
||||||
print(f" ⚠️ Warning: {e}")
|
|
||||||
|
|
||||||
op.alter_column('user_invitations', 'invited_at', nullable=False)
|
|
||||||
print(" ✓ Fixed nullable constraint")
|
|
||||||
|
|
||||||
# ============================================================
|
|
||||||
# 11. FIX NEWSLETTER_ARCHIVES TABLE
|
|
||||||
# ============================================================
|
|
||||||
print("\n[11/14] Fixing newsletter_archives table...")
|
|
||||||
|
|
||||||
try:
|
|
||||||
op.alter_column('newsletter_archives', 'title', type_=sa.String(), postgresql_using='title::varchar')
|
|
||||||
op.alter_column('newsletter_archives', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
|
|
||||||
op.alter_column('newsletter_archives', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
|
|
||||||
print(" ✓ Changed column types")
|
|
||||||
except Exception as e:
|
|
||||||
print(f" ⚠️ Warning: {e}")
|
|
||||||
|
|
||||||
op.alter_column('newsletter_archives', 'document_type', nullable=True)
|
|
||||||
print(" ✓ Fixed nullable constraint")
|
|
||||||
|
|
||||||
# ============================================================
|
|
||||||
# 12. FIX FINANCIAL_REPORTS TABLE
|
|
||||||
# ============================================================
|
|
||||||
print("\n[12/14] Fixing financial_reports table...")
|
|
||||||
|
|
||||||
try:
|
|
||||||
op.alter_column('financial_reports', 'title', type_=sa.String(), postgresql_using='title::varchar')
|
|
||||||
op.alter_column('financial_reports', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
|
|
||||||
op.alter_column('financial_reports', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
|
|
||||||
print(" ✓ Changed column types")
|
|
||||||
except Exception as e:
|
|
||||||
print(f" ⚠️ Warning: {e}")
|
|
||||||
|
|
||||||
op.alter_column('financial_reports', 'document_type', nullable=True)
|
|
||||||
print(" ✓ Fixed nullable constraint")
|
|
||||||
|
|
||||||
# ============================================================
|
|
||||||
# 13. FIX IMPORT_JOBS TABLE
|
|
||||||
# ============================================================
|
|
||||||
print("\n[13/14] Fixing import_jobs table...")
|
|
||||||
|
|
||||||
try:
|
|
||||||
op.alter_column('import_jobs', 'filename', type_=sa.String(), postgresql_using='filename::varchar')
|
|
||||||
op.alter_column('import_jobs', 'file_key', type_=sa.String(), postgresql_using='file_key::varchar')
|
|
||||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
|
||||||
|
|
||||||
# Change JSONB to JSON
|
|
||||||
op.alter_column('import_jobs', 'errors', type_=JSON(), postgresql_using='errors::json')
|
|
||||||
print(" ✓ Changed errors JSONB to JSON")
|
|
||||||
except Exception as e:
|
|
||||||
print(f" ⚠️ Warning: {e}")
|
|
||||||
|
|
||||||
# Fix nullable constraints
|
|
||||||
op.alter_column('import_jobs', 'processed_rows', nullable=False)
|
|
||||||
op.alter_column('import_jobs', 'successful_rows', nullable=False)
|
|
||||||
op.alter_column('import_jobs', 'failed_rows', nullable=False)
|
|
||||||
op.alter_column('import_jobs', 'errors', nullable=False)
|
|
||||||
op.alter_column('import_jobs', 'started_at', nullable=False)
|
|
||||||
print(" ✓ Fixed nullable constraints")
|
|
||||||
|
|
||||||
# ============================================================
|
|
||||||
# 14. FIX SUBSCRIPTION_PLANS TABLE
|
|
||||||
# ============================================================
|
|
||||||
print("\n[14/14] Fixing subscription_plans table...")
|
|
||||||
|
|
||||||
try:
|
|
||||||
op.alter_column('subscription_plans', 'name', type_=sa.String(), postgresql_using='name::varchar')
|
|
||||||
op.alter_column('subscription_plans', 'billing_cycle', type_=sa.String(), postgresql_using='billing_cycle::varchar')
|
|
||||||
op.alter_column('subscription_plans', 'stripe_price_id', type_=sa.String(), postgresql_using='stripe_price_id::varchar')
|
|
||||||
print(" ✓ Changed VARCHAR(n) to VARCHAR")
|
|
||||||
except Exception as e:
|
|
||||||
print(f" ⚠️ Warning: {e}")
|
|
||||||
|
|
||||||
op.alter_column('subscription_plans', 'minimum_price_cents', nullable=False)
|
|
||||||
print(" ✓ Fixed nullable constraint")
|
|
||||||
|
|
||||||
print("\n✅ Schema alignment complete! PROD now matches DEV (source of truth)")
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
"""Revert alignment changes (not recommended)"""
|
|
||||||
print("⚠️ Downgrade not supported for alignment migration")
|
|
||||||
print(" To revert, restore from backup")
|
|
||||||
pass
|
|
||||||
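The fill-then-constrain sequence above is the standard recipe for tightening nullability on a live table: backfill a default, verify, then set NOT NULL. As a minimal sketch, the repeated pairs could be folded into a helper like the one below (illustrative only; backfill_not_null is not part of this repo, and it reuses the conn/op/sa names from the migration above):

def backfill_not_null(conn, table: str, column: str, fill_sql: str) -> None:
    # fill_sql must be a trusted SQL literal/expression, e.g. "''" or "false".
    conn.execute(sa.text(f"UPDATE {table} SET {column} = {fill_sql} WHERE {column} IS NULL"))
    op.alter_column(table, column, nullable=False)

backfill_not_null(conn, 'users', 'phone', "''")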
@@ -1,170 +0,0 @@
"""fix_remaining_differences

Revision ID: 012_fix_remaining
Revises: 011_align_prod_dev
Create Date: 2026-01-05

Fixes the last 5 schema differences found after migration 011:
1-2. import_rollback_audit nullable constraints (PROD)
3-4. role_permissions type and nullable (PROD)
5.   UserStatus enum values (DEV - remove deprecated values)
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import ENUM

# revision identifiers, used by Alembic.
revision: str = '012_fix_remaining'
down_revision: Union[str, None] = '011_align_prod_dev'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Fix remaining schema differences"""
    from sqlalchemy import inspect

    conn = op.get_bind()
    inspector = inspect(conn)

    print("Fixing remaining schema differences...")

    # ============================================================
    # 1. FIX IMPORT_ROLLBACK_AUDIT TABLE (PROD only)
    # ============================================================
    print("\n[1/3] Fixing import_rollback_audit nullable constraints...")

    # Check if there are any NULL values first
    try:
        null_count = conn.execute(sa.text("""
            SELECT COUNT(*) FROM import_rollback_audit
            WHERE created_at IS NULL OR rolled_back_at IS NULL
        """)).scalar()

        if null_count > 0:
            # Fill NULLs with current timestamp
            conn.execute(sa.text("""
                UPDATE import_rollback_audit
                SET created_at = NOW() WHERE created_at IS NULL
            """))
            conn.execute(sa.text("""
                UPDATE import_rollback_audit
                SET rolled_back_at = NOW() WHERE rolled_back_at IS NULL
            """))
            print(f"  ✓ Filled {null_count} NULL timestamps")

        # Now set NOT NULL
        op.alter_column('import_rollback_audit', 'created_at', nullable=False)
        op.alter_column('import_rollback_audit', 'rolled_back_at', nullable=False)
        print("  ✓ Set NOT NULL constraints")
    except Exception as e:
        print(f"  ⚠️ Warning: {e}")

    # ============================================================
    # 2. FIX ROLE_PERMISSIONS TABLE (PROD only)
    # ============================================================
    print("\n[2/3] Fixing role_permissions.role type and nullable...")

    try:
        # Change VARCHAR(50) to VARCHAR(10) to match UserRole enum
        op.alter_column('role_permissions', 'role',
                        type_=sa.String(10),
                        postgresql_using='role::varchar(10)')
        print("  ✓ Changed VARCHAR(50) to VARCHAR(10)")

        # Set NOT NULL
        op.alter_column('role_permissions', 'role', nullable=False)
        print("  ✓ Set NOT NULL constraint")
    except Exception as e:
        print(f"  ⚠️ Warning: {e}")

    # ============================================================
    # 3. FIX USERSTATUS ENUM (DEV only - remove deprecated values)
    # ============================================================
    print("\n[3/3] Fixing UserStatus enum values...")

    try:
        # First, check if the enum has deprecated values
        enum_values = conn.execute(sa.text("""
            SELECT enumlabel
            FROM pg_enum
            WHERE enumtypid = (
                SELECT oid FROM pg_type WHERE typname = 'userstatus'
            )
        """)).fetchall()

        enum_values_list = [row[0] for row in enum_values]
        has_deprecated = 'pending_approval' in enum_values_list or 'pre_approved' in enum_values_list

        if not has_deprecated:
            print("  ✓ UserStatus enum already correct (no deprecated values)")
        else:
            print("  ⏳ Found deprecated enum values, migrating...")

            # Check if any users have deprecated status values
            deprecated_count = conn.execute(sa.text("""
                SELECT COUNT(*) FROM users
                WHERE status IN ('pending_approval', 'pre_approved')
            """)).scalar()

            if deprecated_count > 0:
                print(f"  ⏳ Migrating {deprecated_count} users with deprecated status values...")

                # Migrate deprecated values to new equivalents
                conn.execute(sa.text("""
                    UPDATE users
                    SET status = 'pre_validated'
                    WHERE status = 'pre_approved'
                """))

                conn.execute(sa.text("""
                    UPDATE users
                    SET status = 'payment_pending'
                    WHERE status = 'pending_approval'
                """))

                print("  ✓ Migrated deprecated status values")
            else:
                print("  ✓ No users with deprecated status values")

            # Now remove deprecated enum values.
            # PostgreSQL doesn't support removing enum values directly,
            # so we need to recreate the enum.
            conn.execute(sa.text("""
                -- Create new enum with correct values (matches models.py)
                CREATE TYPE userstatus_new AS ENUM (
                    'pending_email',
                    'pending_validation',
                    'pre_validated',
                    'payment_pending',
                    'active',
                    'inactive',
                    'canceled',
                    'expired',
                    'rejected',
                    'abandoned'
                );

                -- Update column to use new enum
                ALTER TABLE users
                    ALTER COLUMN status TYPE userstatus_new
                    USING status::text::userstatus_new;

                -- Drop old enum and rename new one
                DROP TYPE userstatus;
                ALTER TYPE userstatus_new RENAME TO userstatus;
            """))

            print("  ✓ Updated UserStatus enum (removed deprecated values)")

    except Exception as e:
        print(f"  ⚠️ Warning: Enum update failed (may already be correct): {e}")

    print("\n✅ All remaining differences fixed!")


def downgrade() -> None:
    """Revert fixes (not recommended)"""
    print("⚠️ Downgrade not supported")
    pass
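One caveat with the recreate-the-enum swap above: ALTER COLUMN ... TYPE fails if the column still carries a default bound to the old type ("default for column ... cannot be cast automatically"). A hedged sketch of the extra statements that case would need (this repo's users.status may not have such a default, and 'pending_email' is only an illustrative value):

conn.execute(sa.text("""
    ALTER TABLE users ALTER COLUMN status DROP DEFAULT;
    ALTER TABLE users
        ALTER COLUMN status TYPE userstatus_new
        USING status::text::userstatus_new;
    ALTER TABLE users ALTER COLUMN status SET DEFAULT 'pending_email';
"""))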
@@ -1,147 +0,0 @@
"""sync_role_permissions

Revision ID: 013_sync_permissions
Revises: 012_fix_remaining
Create Date: 2026-01-05

Syncs role_permissions between DEV and PROD bidirectionally.
- Adds 18 DEV-only permissions to PROD (new features)
- Adds 6 PROD-only permissions to DEV (operational/security)
Result: Both environments have identical 142 permission mappings
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = '013_sync_permissions'
down_revision: Union[str, None] = '012_fix_remaining'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Sync role_permissions bidirectionally"""
    from sqlalchemy import text

    conn = op.get_bind()

    print("Syncing role_permissions between environments...")

    # ============================================================
    # STEP 1: Add missing permissions to ensure all exist
    # ============================================================
    print("\n[1/2] Ensuring all permissions exist...")

    # Permissions that should exist (union of both environments)
    all_permissions = [
        # From DEV-only list
        ('donations.export', 'Export Donations', 'donations'),
        ('donations.view', 'View Donations', 'donations'),
        ('financials.create', 'Create Financial Reports', 'financials'),
        ('financials.delete', 'Delete Financial Reports', 'financials'),
        ('financials.edit', 'Edit Financial Reports', 'financials'),
        ('financials.export', 'Export Financial Reports', 'financials'),
        ('financials.payments', 'Manage Financial Payments', 'financials'),
        ('settings.edit', 'Edit Settings', 'settings'),
        ('settings.email_templates', 'Manage Email Templates', 'settings'),
        ('subscriptions.activate', 'Activate Subscriptions', 'subscriptions'),
        ('subscriptions.cancel', 'Cancel Subscriptions', 'subscriptions'),
        ('subscriptions.create', 'Create Subscriptions', 'subscriptions'),
        ('subscriptions.edit', 'Edit Subscriptions', 'subscriptions'),
        ('subscriptions.export', 'Export Subscriptions', 'subscriptions'),
        ('subscriptions.plans', 'Manage Subscription Plans', 'subscriptions'),
        ('subscriptions.view', 'View Subscriptions', 'subscriptions'),
        ('events.calendar_export', 'Export Event Calendar', 'events'),
        ('events.rsvps', 'View Event RSVPs', 'events'),
        # From PROD-only list
        ('permissions.audit', 'Audit Permissions', 'permissions'),
        ('permissions.view', 'View Permissions', 'permissions'),
        ('settings.backup', 'Manage Backups', 'settings'),
    ]

    for code, name, module in all_permissions:
        # Insert if not exists
        conn.execute(text(f"""
            INSERT INTO permissions (id, code, name, description, module, created_at)
            SELECT
                gen_random_uuid(),
                '{code}',
                '{name}',
                '{name}',
                '{module}',
                NOW()
            WHERE NOT EXISTS (
                SELECT 1 FROM permissions WHERE code = '{code}'
            )
        """))

    print("  ✓ Ensured all permissions exist")

    # ============================================================
    # STEP 2: Add missing role-permission mappings
    # ============================================================
    print("\n[2/2] Adding missing role-permission mappings...")

    # Mappings that should exist (union of both environments)
    role_permission_mappings = [
        # DEV-only (add to PROD)
        ('admin', 'donations.export'),
        ('admin', 'donations.view'),
        ('admin', 'financials.create'),
        ('admin', 'financials.delete'),
        ('admin', 'financials.edit'),
        ('admin', 'financials.export'),
        ('admin', 'financials.payments'),
        ('admin', 'settings.edit'),
        ('admin', 'settings.email_templates'),
        ('admin', 'subscriptions.activate'),
        ('admin', 'subscriptions.cancel'),
        ('admin', 'subscriptions.create'),
        ('admin', 'subscriptions.edit'),
        ('admin', 'subscriptions.export'),
        ('admin', 'subscriptions.plans'),
        ('admin', 'subscriptions.view'),
        ('member', 'events.calendar_export'),
        ('member', 'events.rsvps'),
        # PROD-only (add to DEV)
        ('admin', 'permissions.audit'),
        ('admin', 'permissions.view'),
        ('admin', 'settings.backup'),
        ('finance', 'bylaws.view'),
        ('finance', 'events.view'),
        ('finance', 'newsletters.view'),
    ]

    added_count = 0
    for role, perm_code in role_permission_mappings:
        result = conn.execute(text(f"""
            INSERT INTO role_permissions (id, role, permission_id, created_at)
            SELECT
                gen_random_uuid(),
                '{role}',
                p.id,
                NOW()
            FROM permissions p
            WHERE p.code = '{perm_code}'
            AND NOT EXISTS (
                SELECT 1 FROM role_permissions rp
                WHERE rp.role = '{role}'
                AND rp.permission_id = p.id
            )
            RETURNING id
        """))
        if result.rowcount > 0:
            added_count += 1

    print(f"  ✓ Added {added_count} missing role-permission mappings")

    # Verify final count
    final_count = conn.execute(text("SELECT COUNT(*) FROM role_permissions")).scalar()
    print(f"\n✅ Role-permission mappings synchronized: {final_count} total")


def downgrade() -> None:
    """Revert sync (not recommended)"""
    print("⚠️ Downgrade not supported - permissions are additive")
    pass
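The f-string interpolation in the seed statements above is acceptable only because every code, name, and module value is hardcoded in this file. If the same idempotent INSERT ... WHERE NOT EXISTS pattern were ever driven by external input, bound parameters would be the safer equivalent, roughly:

from sqlalchemy import text

stmt = text("""
    INSERT INTO permissions (id, code, name, description, module, created_at)
    SELECT gen_random_uuid(), :code, :name, :name, :module, NOW()
    WHERE NOT EXISTS (SELECT 1 FROM permissions WHERE code = :code)
""")
conn.execute(stmt, {"code": code, "name": name, "module": module})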
@@ -1,39 +0,0 @@
"""add_custom_registration_data

Revision ID: 014_custom_registration
Revises: a1b2c3d4e5f6
Create Date: 2026-02-01 10:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '014_custom_registration'
down_revision: Union[str, None] = 'a1b2c3d4e5f6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Add custom_registration_data column to users table.
    # This stores dynamic registration field responses as JSON.
    op.add_column('users', sa.Column(
        'custom_registration_data',
        sa.JSON,
        nullable=False,
        server_default='{}'
    ))

    # Add comment for documentation
    op.execute("""
        COMMENT ON COLUMN users.custom_registration_data IS
        'Dynamic registration field responses stored as JSON for custom form fields';
    """)


def downgrade() -> None:
    op.drop_column('users', 'custom_registration_data')
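For context, a column like this is usually treated as an opaque JSON blob at the SQL layer; a minimal usage sketch (illustrative, not code from this repo):

from sqlalchemy import text
from sqlalchemy.engine import Connection

def set_registration_data(conn: Connection, user_id: str, payload: str) -> None:
    # Overwrite one user's dynamic registration blob with a JSON string.
    conn.execute(
        text("UPDATE users SET custom_registration_data = :data WHERE id = :uid"),
        {"uid": user_id, "data": payload},
    )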
@@ -1,48 +0,0 @@
"""add_role_audit_fields

Revision ID: 4fa11836f7fd
Revises: 013_sync_permissions
Create Date: 2026-01-16 17:21:40.514605

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID


# revision identifiers, used by Alembic.
revision: str = '4fa11836f7fd'
down_revision: Union[str, None] = '013_sync_permissions'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Add role audit trail columns
    op.add_column('users', sa.Column('role_changed_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('users', sa.Column('role_changed_by', UUID(as_uuid=True), nullable=True))

    # Create foreign key constraint to track who changed the role
    op.create_foreign_key(
        'fk_users_role_changed_by',
        'users', 'users',
        ['role_changed_by'], ['id'],
        ondelete='SET NULL'
    )

    # Create index for efficient querying by role change date
    op.create_index('idx_users_role_changed_at', 'users', ['role_changed_at'])


def downgrade() -> None:
    # Drop index first
    op.drop_index('idx_users_role_changed_at')

    # Drop foreign key constraint
    op.drop_constraint('fk_users_role_changed_by', 'users', type_='foreignkey')

    # Drop columns
    op.drop_column('users', 'role_changed_by')
    op.drop_column('users', 'role_changed_at')
@@ -1,76 +0,0 @@
"""add_stripe_transaction_metadata

Revision ID: 956ea1628264
Revises: ec4cb4a49cde
Create Date: 2026-01-20 22:00:01.806931

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '956ea1628264'
down_revision: Union[str, None] = 'ec4cb4a49cde'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Add Stripe transaction metadata to subscriptions table
    op.add_column('subscriptions', sa.Column('stripe_payment_intent_id', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('stripe_charge_id', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('stripe_invoice_id', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('payment_completed_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('subscriptions', sa.Column('card_last4', sa.String(4), nullable=True))
    op.add_column('subscriptions', sa.Column('card_brand', sa.String(20), nullable=True))
    op.add_column('subscriptions', sa.Column('stripe_receipt_url', sa.String(), nullable=True))

    # Add indexes for Stripe transaction IDs in subscriptions
    op.create_index('idx_subscriptions_payment_intent', 'subscriptions', ['stripe_payment_intent_id'])
    op.create_index('idx_subscriptions_charge_id', 'subscriptions', ['stripe_charge_id'])
    op.create_index('idx_subscriptions_invoice_id', 'subscriptions', ['stripe_invoice_id'])

    # Add Stripe transaction metadata to donations table
    op.add_column('donations', sa.Column('stripe_charge_id', sa.String(), nullable=True))
    op.add_column('donations', sa.Column('stripe_customer_id', sa.String(), nullable=True))
    op.add_column('donations', sa.Column('payment_completed_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('donations', sa.Column('card_last4', sa.String(4), nullable=True))
    op.add_column('donations', sa.Column('card_brand', sa.String(20), nullable=True))
    op.add_column('donations', sa.Column('stripe_receipt_url', sa.String(), nullable=True))

    # Add indexes for Stripe transaction IDs in donations
    op.create_index('idx_donations_payment_intent', 'donations', ['stripe_payment_intent_id'])
    op.create_index('idx_donations_charge_id', 'donations', ['stripe_charge_id'])
    op.create_index('idx_donations_customer_id', 'donations', ['stripe_customer_id'])


def downgrade() -> None:
    # Remove indexes from donations
    op.drop_index('idx_donations_customer_id', table_name='donations')
    op.drop_index('idx_donations_charge_id', table_name='donations')
    op.drop_index('idx_donations_payment_intent', table_name='donations')

    # Remove columns from donations
    op.drop_column('donations', 'stripe_receipt_url')
    op.drop_column('donations', 'card_brand')
    op.drop_column('donations', 'card_last4')
    op.drop_column('donations', 'payment_completed_at')
    op.drop_column('donations', 'stripe_customer_id')
    op.drop_column('donations', 'stripe_charge_id')

    # Remove indexes from subscriptions
    op.drop_index('idx_subscriptions_invoice_id', table_name='subscriptions')
    op.drop_index('idx_subscriptions_charge_id', table_name='subscriptions')
    op.drop_index('idx_subscriptions_payment_intent', table_name='subscriptions')

    # Remove columns from subscriptions
    op.drop_column('subscriptions', 'stripe_receipt_url')
    op.drop_column('subscriptions', 'card_brand')
    op.drop_column('subscriptions', 'card_last4')
    op.drop_column('subscriptions', 'payment_completed_at')
    op.drop_column('subscriptions', 'stripe_invoice_id')
    op.drop_column('subscriptions', 'stripe_charge_id')
    op.drop_column('subscriptions', 'stripe_payment_intent_id')
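The point of indexing the Stripe IDs above is webhook reconciliation: given a payment_intent from an event payload, the matching row can be found without a sequential scan. A hedged lookup sketch (helper name illustrative; only the id column is selected to avoid assuming the rest of the schema):

from sqlalchemy import text

def donation_id_for_intent(conn, payment_intent_id: str):
    # Served by idx_donations_payment_intent once this migration has run.
    return conn.execute(
        text("SELECT id FROM donations WHERE stripe_payment_intent_id = :pi"),
        {"pi": payment_intent_id},
    ).scalar()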
@@ -1,100 +0,0 @@
"""add_payment_methods

Revision ID: a1b2c3d4e5f6
Revises: 956ea1628264
Create Date: 2026-01-30 10:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'a1b2c3d4e5f6'
down_revision: Union[str, None] = '956ea1628264'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    conn = op.get_bind()

    # Create PaymentMethodType enum
    paymentmethodtype = postgresql.ENUM(
        'card', 'cash', 'bank_transfer', 'check',
        name='paymentmethodtype',
        create_type=False
    )
    paymentmethodtype.create(conn, checkfirst=True)

    # Check if stripe_customer_id column exists on users table
    result = conn.execute(sa.text("""
        SELECT column_name FROM information_schema.columns
        WHERE table_name = 'users' AND column_name = 'stripe_customer_id'
    """))
    if result.fetchone() is None:
        # Add stripe_customer_id to users table
        op.add_column('users', sa.Column(
            'stripe_customer_id',
            sa.String(),
            nullable=True,
            comment='Stripe Customer ID for payment method management'
        ))
        op.create_index('ix_users_stripe_customer_id', 'users', ['stripe_customer_id'])

    # Check if payment_methods table exists
    result = conn.execute(sa.text("""
        SELECT table_name FROM information_schema.tables
        WHERE table_name = 'payment_methods'
    """))
    if result.fetchone() is None:
        # Create payment_methods table
        op.create_table(
            'payment_methods',
            sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
            sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
            sa.Column('stripe_payment_method_id', sa.String(), nullable=True, unique=True, comment='Stripe pm_xxx reference'),
            sa.Column('card_brand', sa.String(20), nullable=True, comment='Card brand: visa, mastercard, amex, etc.'),
            sa.Column('card_last4', sa.String(4), nullable=True, comment='Last 4 digits of card'),
            sa.Column('card_exp_month', sa.Integer(), nullable=True, comment='Card expiration month'),
            sa.Column('card_exp_year', sa.Integer(), nullable=True, comment='Card expiration year'),
            sa.Column('card_funding', sa.String(20), nullable=True, comment='Card funding type: credit, debit, prepaid'),
            sa.Column('payment_type', paymentmethodtype, nullable=False, server_default='card'),
            sa.Column('is_default', sa.Boolean(), nullable=False, server_default='false', comment='Whether this is the default payment method for auto-renewals'),
            sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true', comment='Soft delete flag - False means removed'),
            sa.Column('is_manual', sa.Boolean(), nullable=False, server_default='false', comment='True for manually recorded methods (cash/check)'),
            sa.Column('manual_notes', sa.Text(), nullable=True, comment='Admin notes for manual payment methods'),
            sa.Column('created_by', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='SET NULL'), nullable=True, comment='Admin who added this on behalf of user'),
            sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
            sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now(), onupdate=sa.func.now()),
        )

        # Create indexes
        op.create_index('ix_payment_methods_user_id', 'payment_methods', ['user_id'])
        op.create_index('ix_payment_methods_stripe_pm_id', 'payment_methods', ['stripe_payment_method_id'])
        op.create_index('idx_payment_method_user_default', 'payment_methods', ['user_id', 'is_default'])
        op.create_index('idx_payment_method_active', 'payment_methods', ['user_id', 'is_active'])


def downgrade() -> None:
    # Drop indexes
    op.drop_index('idx_payment_method_active', table_name='payment_methods')
    op.drop_index('idx_payment_method_user_default', table_name='payment_methods')
    op.drop_index('ix_payment_methods_stripe_pm_id', table_name='payment_methods')
    op.drop_index('ix_payment_methods_user_id', table_name='payment_methods')

    # Drop payment_methods table
    op.drop_table('payment_methods')

    # Drop stripe_customer_id from users
    op.drop_index('ix_users_stripe_customer_id', table_name='users')
    op.drop_column('users', 'stripe_customer_id')

    # Drop PaymentMethodType enum
    paymentmethodtype = postgresql.ENUM(
        'card', 'cash', 'bank_transfer', 'check',
        name='paymentmethodtype'
    )
    paymentmethodtype.drop(op.get_bind(), checkfirst=True)
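A note on the enum handling above: create_type=False keeps SQLAlchemy from emitting its own CREATE TYPE when table DDL references the enum, so creation stays under the explicit create(..., checkfirst=True) call, which is a no-op on re-run. The idiom in isolation (enum name and labels are illustrative):

from sqlalchemy.dialects import postgresql

def ensure_enum(bind) -> postgresql.ENUM:
    example = postgresql.ENUM('draft', 'final', name='example_status', create_type=False)
    example.create(bind, checkfirst=True)  # safe to run repeatedly
    return example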
@@ -1,68 +0,0 @@
"""add_system_settings_table

Revision ID: ec4cb4a49cde
Revises: 4fa11836f7fd
Create Date: 2026-01-16 18:16:00.283455

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID


# revision identifiers, used by Alembic.
revision: str = 'ec4cb4a49cde'
down_revision: Union[str, None] = '4fa11836f7fd'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Create enum for setting types (only if not exists)
    op.execute("""
        DO $$ BEGIN
            CREATE TYPE settingtype AS ENUM ('plaintext', 'encrypted', 'json');
        EXCEPTION
            WHEN duplicate_object THEN null;
        END $$;
    """)

    # Create system_settings table
    op.execute("""
        CREATE TABLE system_settings (
            id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
            setting_key VARCHAR(100) UNIQUE NOT NULL,
            setting_value TEXT,
            setting_type settingtype NOT NULL DEFAULT 'plaintext'::settingtype,
            description TEXT,
            updated_by UUID REFERENCES users(id) ON DELETE SET NULL,
            created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
            is_sensitive BOOLEAN NOT NULL DEFAULT FALSE
        );

        COMMENT ON COLUMN system_settings.setting_key IS 'Unique setting identifier (e.g., stripe_secret_key)';
        COMMENT ON COLUMN system_settings.setting_value IS 'Setting value (encrypted if setting_type is encrypted)';
        COMMENT ON COLUMN system_settings.setting_type IS 'Type of setting: plaintext, encrypted, or json';
        COMMENT ON COLUMN system_settings.description IS 'Human-readable description of the setting';
        COMMENT ON COLUMN system_settings.updated_by IS 'User who last updated this setting';
        COMMENT ON COLUMN system_settings.is_sensitive IS 'Whether this setting contains sensitive data';
    """)

    # Create indexes
    op.create_index('idx_system_settings_key', 'system_settings', ['setting_key'])
    op.create_index('idx_system_settings_updated_at', 'system_settings', ['updated_at'])


def downgrade() -> None:
    # Drop indexes
    op.drop_index('idx_system_settings_updated_at')
    op.drop_index('idx_system_settings_key')

    # Drop table
    op.drop_table('system_settings')

    # Drop enum
    op.execute('DROP TYPE IF EXISTS settingtype')
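A minimal read path against the table created above, assuming decryption of 'encrypted' values happens elsewhere (helper name illustrative):

from sqlalchemy import text

def get_setting(conn, key: str):
    # Returns a (setting_value, setting_type) row, or None if the key is absent.
    return conn.execute(
        text("SELECT setting_value, setting_type FROM system_settings WHERE setting_key = :k"),
        {"k": key},
    ).first()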
4 auth.py

@@ -128,7 +128,7 @@ async def get_current_admin_user(current_user: User = Depends(get_current_user))
     return current_user

 async def get_active_member(current_user: User = Depends(get_current_user)) -> User:
-    """Require user to be active member or staff with valid status"""
+    """Require user to be active member with valid payment"""
    from models import UserStatus

     if current_user.status != UserStatus.active:
@@ -138,7 +138,7 @@ async def get_active_member(current_user: User = Depends(get_current_user)) -> U
     )

     role_code = get_user_role_code(current_user)
-    if role_code not in ["member", "admin", "superadmin", "finance"]:
+    if role_code not in ["member", "admin", "superadmin"]:
         raise HTTPException(
             status_code=status.HTTP_403_FORBIDDEN,
             detail="Member access only"
@@ -1,92 +0,0 @@
-- Comprehensive check for all missing columns
-- Run: psql -h 10.9.23.11 -p 54321 -U postgres -d loaf_new -f check_all_columns.sql

\echo '================================================================'
\echo 'COMPREHENSIVE COLUMN CHECK FOR ALL TABLES'
\echo '================================================================'

-- ============================================================
-- 1. USERS TABLE
-- ============================================================
\echo ''
\echo '1. USERS TABLE - Expected: 60+ columns'
\echo 'Checking for specific columns:'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'newsletter_publish_name') THEN '✓' ELSE '✗' END || ' newsletter_publish_name',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'volunteer_interests') THEN '✓' ELSE '✗' END || ' volunteer_interests',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'scholarship_requested') THEN '✓' ELSE '✗' END || ' scholarship_requested',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'show_in_directory') THEN '✓' ELSE '✗' END || ' show_in_directory',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'password_reset_token') THEN '✓' ELSE '✗' END || ' password_reset_token',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'accepts_tos') THEN '✓' ELSE '✗' END || ' accepts_tos',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'member_since') THEN '✓' ELSE '✗' END || ' member_since',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'rejection_reason') THEN '✓' ELSE '✗' END || ' rejection_reason',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'import_source') THEN '✓' ELSE '✗' END || ' import_source'
\gx

-- ============================================================
-- 2. EVENTS TABLE
-- ============================================================
\echo ''
\echo '2. EVENTS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'events' AND column_name = 'calendar_uid') THEN '✓' ELSE '✗' END || ' calendar_uid';

-- ============================================================
-- 3. SUBSCRIPTIONS TABLE
-- ============================================================
\echo ''
\echo '3. SUBSCRIPTIONS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'subscriptions' AND column_name = 'base_subscription_cents') THEN '✓' ELSE '✗' END || ' base_subscription_cents',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'subscriptions' AND column_name = 'donation_cents') THEN '✓' ELSE '✗' END || ' donation_cents',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'subscriptions' AND column_name = 'manual_payment') THEN '✓' ELSE '✗' END || ' manual_payment'
\gx

-- ============================================================
-- 4. IMPORT_JOBS TABLE
-- ============================================================
\echo ''
\echo '4. IMPORT_JOBS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'field_mapping') THEN '✓' ELSE '✗' END || ' field_mapping',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'wordpress_metadata') THEN '✓' ELSE '✗' END || ' wordpress_metadata',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'imported_user_ids') THEN '✓' ELSE '✗' END || ' imported_user_ids',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'rollback_at') THEN '✓' ELSE '✗' END || ' rollback_at',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'rollback_by') THEN '✓' ELSE '✗' END || ' rollback_by'
\gx

-- ============================================================
-- 5. CHECK IF IMPORT_ROLLBACK_AUDIT TABLE EXISTS
-- ============================================================
\echo ''
\echo '5. IMPORT_ROLLBACK_AUDIT TABLE - Should exist'
SELECT CASE
    WHEN EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'import_rollback_audit')
    THEN '✓ Table exists'
    ELSE '✗ TABLE MISSING - Need to create it'
END AS status;

-- ============================================================
-- SUMMARY: Count existing columns in each table
-- ============================================================
\echo ''
\echo '================================================================'
\echo 'SUMMARY: Column counts per table'
\echo '================================================================'

SELECT
    table_name,
    COUNT(*) as column_count
FROM information_schema.columns
WHERE table_name IN (
    'users', 'events', 'event_rsvps', 'subscription_plans', 'subscriptions',
    'donations', 'event_galleries', 'newsletter_archives', 'financial_reports',
    'bylaws_documents', 'storage_usage', 'permissions', 'roles', 'role_permissions',
    'user_invitations', 'import_jobs', 'import_rollback_audit'
)
GROUP BY table_name
ORDER BY table_name;
@@ -1,345 +0,0 @@
#!/usr/bin/env python3
"""
Database Integrity Checker
Compares schema and data integrity between development and production databases
"""

import sys
from sqlalchemy import create_engine, inspect, text
from sqlalchemy.engine import reflection
import json
from collections import defaultdict

# Database URLs
DEV_DB = "postgresql://postgres:RchhcpaUKZuZuMOvB5kwCP1weLBnAG6tNMXE5FHdk8AwCvolBMALYFVYRM7WCl9x@10.9.23.11:5001/membership_demo"
PROD_DB = "postgresql://postgres:fDv3fRvMgfPueDWDUxj27NJVaynsewIdh6b2Hb28tcvG3Ew6mhscASg2kulx4tr7@10.9.23.11:54321/loaf_new"

def get_db_info(engine, label):
    """Get comprehensive database information"""
    inspector = inspect(engine)

    info = {
        'label': label,
        'tables': {},
        'indexes': {},
        'foreign_keys': {},
        'sequences': [],
        'enums': []
    }

    # Get all table names
    table_names = inspector.get_table_names()

    for table_name in table_names:
        # Get columns
        columns = inspector.get_columns(table_name)
        info['tables'][table_name] = {
            'columns': {
                col['name']: {
                    'type': str(col['type']),
                    'nullable': col['nullable'],
                    'default': str(col.get('default', None)),
                    'autoincrement': col.get('autoincrement', False)
                }
                for col in columns
            },
            'column_count': len(columns)
        }

        # Get primary keys
        pk = inspector.get_pk_constraint(table_name)
        info['tables'][table_name]['primary_key'] = pk.get('constrained_columns', [])

        # Get indexes
        indexes = inspector.get_indexes(table_name)
        info['indexes'][table_name] = [
            {
                'name': idx['name'],
                'columns': idx['column_names'],
                'unique': idx['unique']
            }
            for idx in indexes
        ]

        # Get foreign keys
        fks = inspector.get_foreign_keys(table_name)
        info['foreign_keys'][table_name] = [
            {
                'name': fk.get('name'),
                'columns': fk['constrained_columns'],
                'referred_table': fk['referred_table'],
                'referred_columns': fk['referred_columns']
            }
            for fk in fks
        ]

    # Get sequences
    with engine.connect() as conn:
        result = conn.execute(text("""
            SELECT sequence_name
            FROM information_schema.sequences
            WHERE sequence_schema = 'public'
        """))
        info['sequences'] = [row[0] for row in result]

        # Get enum types
        result = conn.execute(text("""
            SELECT t.typname as enum_name,
                   array_agg(e.enumlabel ORDER BY e.enumsortorder) as enum_values
            FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            WHERE t.typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'public')
            GROUP BY t.typname
        """))
        info['enums'] = {row[0]: row[1] for row in result}

    return info

def compare_tables(dev_info, prod_info):
    """Compare tables between databases"""
    dev_tables = set(dev_info['tables'].keys())
    prod_tables = set(prod_info['tables'].keys())

    print("\n" + "="*80)
    print("TABLE COMPARISON")
    print("="*80)

    # Tables only in dev
    dev_only = dev_tables - prod_tables
    if dev_only:
        print(f"\n❌ Tables only in DEV ({len(dev_only)}):")
        for table in sorted(dev_only):
            print(f"  - {table}")

    # Tables only in prod
    prod_only = prod_tables - dev_tables
    if prod_only:
        print(f"\n❌ Tables only in PROD ({len(prod_only)}):")
        for table in sorted(prod_only):
            print(f"  - {table}")

    # Common tables
    common = dev_tables & prod_tables
    print(f"\n✅ Common tables: {len(common)}")

    return common

def compare_columns(dev_info, prod_info, common_tables):
    """Compare columns for common tables"""
    print("\n" + "="*80)
    print("COLUMN COMPARISON")
    print("="*80)

    issues = []

    for table in sorted(common_tables):
        dev_cols = set(dev_info['tables'][table]['columns'].keys())
        prod_cols = set(prod_info['tables'][table]['columns'].keys())

        dev_only = dev_cols - prod_cols
        prod_only = prod_cols - dev_cols

        if dev_only or prod_only:
            print(f"\n⚠️ Table '{table}' has column differences:")

            if dev_only:
                print(f"  Columns only in DEV: {', '.join(sorted(dev_only))}")
                issues.append(f"{table}: DEV-only columns: {', '.join(dev_only)}")

            if prod_only:
                print(f"  Columns only in PROD: {', '.join(sorted(prod_only))}")
                issues.append(f"{table}: PROD-only columns: {', '.join(prod_only)}")

        # Compare column types for common columns
        common_cols = dev_cols & prod_cols
        for col in common_cols:
            dev_col = dev_info['tables'][table]['columns'][col]
            prod_col = prod_info['tables'][table]['columns'][col]

            if dev_col['type'] != prod_col['type']:
                print(f"  ⚠️ Column '{col}' type mismatch:")
                print(f"     DEV:  {dev_col['type']}")
                print(f"     PROD: {prod_col['type']}")
                issues.append(f"{table}.{col}: Type mismatch")

            if dev_col['nullable'] != prod_col['nullable']:
                print(f"  ⚠️ Column '{col}' nullable mismatch:")
                print(f"     DEV:  {dev_col['nullable']}")
                print(f"     PROD: {prod_col['nullable']}")
                issues.append(f"{table}.{col}: Nullable mismatch")

    if not issues:
        print("\n✅ All columns match between DEV and PROD")

    return issues

def compare_enums(dev_info, prod_info):
    """Compare enum types"""
    print("\n" + "="*80)
    print("ENUM TYPE COMPARISON")
    print("="*80)

    dev_enums = set(dev_info['enums'].keys())
    prod_enums = set(prod_info['enums'].keys())

    dev_only = dev_enums - prod_enums
    prod_only = prod_enums - dev_enums

    issues = []

    if dev_only:
        print(f"\n❌ Enums only in DEV: {', '.join(sorted(dev_only))}")
        issues.extend([f"Enum '{e}' only in DEV" for e in dev_only])

    if prod_only:
        print(f"\n❌ Enums only in PROD: {', '.join(sorted(prod_only))}")
        issues.extend([f"Enum '{e}' only in PROD" for e in prod_only])

    # Compare enum values for common enums
    common = dev_enums & prod_enums
    for enum_name in sorted(common):
        dev_values = set(dev_info['enums'][enum_name])
        prod_values = set(prod_info['enums'][enum_name])
|
|
||||||
|
|
||||||
if dev_values != prod_values:
|
|
||||||
print(f"\n⚠️ Enum '{enum_name}' values differ:")
|
|
||||||
print(f" DEV: {', '.join(sorted(dev_values))}")
|
|
||||||
print(f" PROD: {', '.join(sorted(prod_values))}")
|
|
||||||
issues.append(f"Enum '{enum_name}' values differ")
|
|
||||||
|
|
||||||
if not issues:
|
|
||||||
print("\n✅ All enum types match")
|
|
||||||
|
|
||||||
return issues
|
|
||||||
|
|
||||||
def check_migration_history(dev_engine, prod_engine):
|
|
||||||
"""Check Alembic migration history"""
|
|
||||||
print("\n" + "="*80)
|
|
||||||
print("MIGRATION HISTORY")
|
|
||||||
print("="*80)
|
|
||||||
|
|
||||||
try:
|
|
||||||
with dev_engine.connect() as dev_conn:
|
|
||||||
dev_result = dev_conn.execute(text("SELECT version_num FROM alembic_version"))
|
|
||||||
dev_version = dev_result.fetchone()
|
|
||||||
dev_version = dev_version[0] if dev_version else None
|
|
||||||
|
|
||||||
with prod_engine.connect() as prod_conn:
|
|
||||||
prod_result = prod_conn.execute(text("SELECT version_num FROM alembic_version"))
|
|
||||||
prod_version = prod_result.fetchone()
|
|
||||||
prod_version = prod_version[0] if prod_version else None
|
|
||||||
|
|
||||||
print(f"\nDEV migration version: {dev_version}")
|
|
||||||
print(f"PROD migration version: {prod_version}")
|
|
||||||
|
|
||||||
if dev_version == prod_version:
|
|
||||||
print("✅ Migration versions match")
|
|
||||||
return []
|
|
||||||
else:
|
|
||||||
print("❌ Migration versions DO NOT match")
|
|
||||||
return ["Migration versions differ"]
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
print(f"⚠️ Could not check migration history: {str(e)}")
|
|
||||||
return [f"Migration check failed: {str(e)}"]
|
|
||||||
|
|
||||||
def get_row_counts(engine, tables):
|
|
||||||
"""Get row counts for all tables"""
|
|
||||||
counts = {}
|
|
||||||
with engine.connect() as conn:
|
|
||||||
for table in tables:
|
|
||||||
result = conn.execute(text(f"SELECT COUNT(*) FROM {table}"))
|
|
||||||
counts[table] = result.fetchone()[0]
|
|
||||||
return counts
|
|
||||||
|
|
||||||
def compare_data_counts(dev_engine, prod_engine, common_tables):
|
|
||||||
"""Compare row counts between databases"""
|
|
||||||
print("\n" + "="*80)
|
|
||||||
print("DATA ROW COUNTS")
|
|
||||||
print("="*80)
|
|
||||||
|
|
||||||
print("\nGetting DEV row counts...")
|
|
||||||
dev_counts = get_row_counts(dev_engine, common_tables)
|
|
||||||
|
|
||||||
print("Getting PROD row counts...")
|
|
||||||
prod_counts = get_row_counts(prod_engine, common_tables)
|
|
||||||
|
|
||||||
print(f"\n{'Table':<30} {'DEV':<15} {'PROD':<15} {'Diff':<15}")
|
|
||||||
print("-" * 75)
|
|
||||||
|
|
||||||
for table in sorted(common_tables):
|
|
||||||
dev_count = dev_counts[table]
|
|
||||||
prod_count = prod_counts[table]
|
|
||||||
diff = dev_count - prod_count
|
|
||||||
diff_str = f"+{diff}" if diff > 0 else str(diff)
|
|
||||||
|
|
||||||
status = "⚠️ " if abs(diff) > 0 else "✅"
|
|
||||||
print(f"{status} {table:<28} {dev_count:<15} {prod_count:<15} {diff_str:<15}")
|
|
||||||
|
|
||||||
def main():
|
|
||||||
print("\n" + "="*80)
|
|
||||||
print("DATABASE INTEGRITY CHECKER")
|
|
||||||
print("="*80)
|
|
||||||
print(f"\nDEV: {DEV_DB.split('@')[1]}") # Hide password
|
|
||||||
print(f"PROD: {PROD_DB.split('@')[1]}")
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Connect to databases
|
|
||||||
print("\n🔌 Connecting to databases...")
|
|
||||||
dev_engine = create_engine(DEV_DB)
|
|
||||||
prod_engine = create_engine(PROD_DB)
|
|
||||||
|
|
||||||
# Test connections
|
|
||||||
with dev_engine.connect() as conn:
|
|
||||||
conn.execute(text("SELECT 1"))
|
|
||||||
print("✅ Connected to DEV database")
|
|
||||||
|
|
||||||
with prod_engine.connect() as conn:
|
|
||||||
conn.execute(text("SELECT 1"))
|
|
||||||
print("✅ Connected to PROD database")
|
|
||||||
|
|
||||||
# Get database info
|
|
||||||
print("\n📊 Gathering database information...")
|
|
||||||
dev_info = get_db_info(dev_engine, "DEV")
|
|
||||||
prod_info = get_db_info(prod_engine, "PROD")
|
|
||||||
|
|
||||||
# Run comparisons
|
|
||||||
all_issues = []
|
|
||||||
|
|
||||||
common_tables = compare_tables(dev_info, prod_info)
|
|
||||||
|
|
||||||
column_issues = compare_columns(dev_info, prod_info, common_tables)
|
|
||||||
all_issues.extend(column_issues)
|
|
||||||
|
|
||||||
enum_issues = compare_enums(dev_info, prod_info)
|
|
||||||
all_issues.extend(enum_issues)
|
|
||||||
|
|
||||||
migration_issues = check_migration_history(dev_engine, prod_engine)
|
|
||||||
all_issues.extend(migration_issues)
|
|
||||||
|
|
||||||
compare_data_counts(dev_engine, prod_engine, common_tables)
|
|
||||||
|
|
||||||
# Summary
|
|
||||||
print("\n" + "="*80)
|
|
||||||
print("SUMMARY")
|
|
||||||
print("="*80)
|
|
||||||
|
|
||||||
if all_issues:
|
|
||||||
print(f"\n❌ Found {len(all_issues)} integrity issues:")
|
|
||||||
for i, issue in enumerate(all_issues, 1):
|
|
||||||
print(f" {i}. {issue}")
|
|
||||||
print("\n⚠️ Databases are NOT in sync!")
|
|
||||||
sys.exit(1)
|
|
||||||
else:
|
|
||||||
print("\n✅ Databases are in sync!")
|
|
||||||
print("✅ No integrity issues found")
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
print(f"\n❌ Error: {str(e)}")
|
|
||||||
import traceback
|
|
||||||
traceback.print_exc()
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
@@ -1,15 +1,38 @@
 #!/usr/bin/env python3
 """
 Create Superadmin User Script
-Directly creates a superadmin user in the database for LOAF membership platform
+Generates a superadmin user with hashed password for LOAF membership platform
 """
 
+import bcrypt
 import sys
 import os
 from getpass import getpass
 
-# Add the backend directory to path for imports
-sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+def generate_password_hash(password: str) -> str:
+    """Generate bcrypt hash for password"""
+    return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode()
 
+def generate_sql(email: str, password_hash: str, first_name: str, last_name: str) -> str:
+    """Generate SQL INSERT statement"""
+    return f"""
+-- Create Superadmin User
+INSERT INTO users (
+    id, email, password_hash, first_name, last_name,
+    status, role, email_verified, created_at, updated_at
+) VALUES (
+    gen_random_uuid(),
+    '{email}',
+    '{password_hash}',
+    '{first_name}',
+    '{last_name}',
+    'active',
+    'superadmin',
+    true,
+    NOW(),
+    NOW()
+);
+"""
 
 def main():
     print("=" * 70)
@@ -17,15 +40,6 @@ def main():
     print("=" * 70)
     print()
 
-    # Check for DATABASE_URL
-    from dotenv import load_dotenv
-    load_dotenv()
-
-    database_url = os.getenv("DATABASE_URL")
-    if not database_url:
-        print("❌ DATABASE_URL not found in environment or .env file")
-        sys.exit(1)
-
     # Get user input
    email = input("Email address: ").strip()
    if not email or '@' not in email:
@@ -54,89 +68,31 @@ def main():
        sys.exit(1)

    print()
-    print("Creating superadmin user...")
-
-    try:
-        # Import database dependencies
-        from sqlalchemy import create_engine, text
-        from passlib.context import CryptContext
-
-        # Create password hash
-        pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
-        password_hash = pwd_context.hash(password)
-
-        # Connect to database
-        engine = create_engine(database_url)
-
-        with engine.connect() as conn:
-            # Check if user already exists
-            result = conn.execute(
-                text("SELECT id FROM users WHERE email = :email"),
-                {"email": email}
-            )
-            if result.fetchone():
-                print(f"❌ User with email '{email}' already exists")
-                sys.exit(1)
-
-            # Insert superadmin user
-            conn.execute(
-                text("""
-                    INSERT INTO users (
-                        id, email, password_hash, first_name, last_name,
-                        phone, address, city, state, zipcode, date_of_birth,
-                        status, role, email_verified,
-                        newsletter_subscribed, accepts_tos,
-                        created_at, updated_at
-                    ) VALUES (
-                        gen_random_uuid(),
-                        :email,
-                        :password_hash,
-                        :first_name,
-                        :last_name,
-                        '',
-                        '',
-                        '',
-                        '',
-                        '',
-                        '1990-01-01',
-                        'active',
-                        'superadmin',
-                        true,
-                        false,
-                        true,
-                        NOW(),
-                        NOW()
-                    )
-                """),
-                {
-                    "email": email,
-                    "password_hash": password_hash,
-                    "first_name": first_name,
-                    "last_name": last_name
-                }
-            )
-            conn.commit()
-
+    print("Generating password hash...")
+    password_hash = generate_password_hash(password)
+    print("✅ Password hash generated")
     print()
     print("=" * 70)
-    print("✅ Superadmin user created successfully!")
-    print("=" * 70)
-    print()
-    print(f"   Email: {email}")
-    print(f"   Name: {first_name} {last_name}")
-    print(f"   Role: superadmin")
-    print(f"   Status: active")
-    print()
-    print("You can now log in with these credentials.")
+    print("SQL STATEMENT")
     print("=" * 70)
 
-    except ImportError as e:
-        print(f"❌ Missing dependency: {e}")
-        print("   Run: pip install sqlalchemy psycopg2-binary passlib python-dotenv")
-        sys.exit(1)
-    except Exception as e:
-        print(f"❌ Database error: {e}")
-        sys.exit(1)
+    sql = generate_sql(email, password_hash, first_name, last_name)
+    print(sql)
+
+    # Save to file
+    output_file = "create_superadmin.sql"
+    with open(output_file, 'w') as f:
+        f.write(sql)
+
+    print("=" * 70)
+    print(f"✅ SQL saved to: {output_file}")
+    print()
+    print("Run this command to create the user:")
+    print(f"   psql -U postgres -d loaf_new -f {output_file}")
+    print()
+    print("Or copy the SQL above and run it directly in psql")
+    print("=" * 70)
 
 if __name__ == "__main__":
     try:
@@ -144,3 +100,6 @@ if __name__ == "__main__":
     except KeyboardInterrupt:
         print("\n\n❌ Cancelled by user")
         sys.exit(1)
+    except Exception as e:
+        print(f"\n❌ Error: {e}")
+        sys.exit(1)
17 database.py
@@ -1,7 +1,6 @@
 from sqlalchemy import create_engine
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import sessionmaker
-from sqlalchemy.pool import QueuePool
 import os
 from dotenv import load_dotenv
 from pathlib import Path
@@ -11,21 +10,7 @@ load_dotenv(ROOT_DIR / '.env')
 
 DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://user:password@localhost:5432/membership_db')
 
-# Configure engine with connection pooling and connection health checks
-engine = create_engine(
-    DATABASE_URL,
-    poolclass=QueuePool,
-    pool_size=5,         # Keep 5 connections open
-    max_overflow=10,     # Allow up to 10 extra connections during peak
-    pool_pre_ping=True,  # CRITICAL: Test connections before using them
-    pool_recycle=3600,   # Recycle connections every hour (prevents stale connections)
-    echo=False,          # Set to True for SQL debugging
-    connect_args={
-        'connect_timeout': 10,                   # Timeout connection attempts after 10 seconds
-        'options': '-c statement_timeout=30000'  # 30 second query timeout
-    }
-)
+engine = create_engine(DATABASE_URL)
 
 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
 
 Base = declarative_base()
@@ -1,122 +0,0 @@
"""
Encryption service for sensitive settings stored in database.

Uses Fernet symmetric encryption (AES-128 in CBC mode with HMAC authentication).
The encryption key is derived from a master secret stored in .env.
"""

import os
import base64
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from cryptography.hazmat.backends import default_backend


class EncryptionService:
    """Service for encrypting and decrypting sensitive configuration values"""

    def __init__(self):
        # Get master encryption key from environment
        # This should be a long, random string (e.g., 64 characters)
        # Generate one with: python -c "import secrets; print(secrets.token_urlsafe(64))"
        self.master_secret = os.environ.get('SETTINGS_ENCRYPTION_KEY')

        if not self.master_secret:
            raise ValueError(
                "SETTINGS_ENCRYPTION_KEY environment variable not set. "
                "Generate one with: python -c \"import secrets; print(secrets.token_urlsafe(64))\""
            )

        # Derive encryption key from master secret using PBKDF2HMAC
        # This adds an extra layer of security
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=b'systemsettings',  # Fixed salt (OK for key derivation from strong secret)
            iterations=100000,
            backend=default_backend()
        )
        key = base64.urlsafe_b64encode(kdf.derive(self.master_secret.encode()))
        self.cipher = Fernet(key)

    def encrypt(self, plaintext: str) -> str:
        """
        Encrypt a plaintext string.

        Args:
            plaintext: The string to encrypt

        Returns:
            Base64-encoded encrypted string
        """
        if not plaintext:
            return ""

        encrypted_bytes = self.cipher.encrypt(plaintext.encode())
        return encrypted_bytes.decode('utf-8')

    def decrypt(self, encrypted: str) -> str:
        """
        Decrypt an encrypted string.

        Args:
            encrypted: The base64-encoded encrypted string

        Returns:
            Decrypted plaintext string

        Raises:
            cryptography.fernet.InvalidToken: If decryption fails (wrong key or corrupted data)
        """
        if not encrypted:
            return ""

        decrypted_bytes = self.cipher.decrypt(encrypted.encode())
        return decrypted_bytes.decode('utf-8')

    def is_encrypted(self, value: str) -> bool:
        """
        Check if a value appears to be encrypted (starts with Fernet token format).

        This is a heuristic check - not 100% reliable but useful for validation.

        Args:
            value: String to check

        Returns:
            True if value looks like a Fernet token
        """
        if not value:
            return False

        # Fernet tokens are base64-encoded and start with version byte (gAAAAA...)
        # They're always > 60 characters
        try:
            return len(value) > 60 and value.startswith('gAAAAA')
        except:
            return False


# Global encryption service instance
# Initialize on module import so it fails fast if encryption key is missing
try:
    encryption_service = EncryptionService()
except ValueError as e:
    print(f"WARNING: {e}")
    print("Encryption service will not be available.")
    encryption_service = None


def get_encryption_service() -> EncryptionService:
    """
    Get the global encryption service instance.

    Raises:
        ValueError: If encryption service is not initialized (missing SETTINGS_ENCRYPTION_KEY)
    """
    if encryption_service is None:
        raise ValueError(
            "Encryption service not initialized. Set SETTINGS_ENCRYPTION_KEY environment variable."
        )
    return encryption_service
1133 import_templates.py
File diff suppressed because it is too large
@@ -94,30 +94,6 @@ BEGIN;
 -- SECTION 2: Create Core Tables
 -- ============================================================================
 
--- Import Jobs table (must be created before users due to FK reference)
-CREATE TABLE IF NOT EXISTS import_jobs (
-    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-
-    filename VARCHAR NOT NULL,
-    status importjobstatus NOT NULL DEFAULT 'processing',
-    total_rows INTEGER DEFAULT 0,
-    processed_rows INTEGER DEFAULT 0,
-    success_count INTEGER DEFAULT 0,
-    error_count INTEGER DEFAULT 0,
-    error_log JSONB DEFAULT '[]'::jsonb,
-
-    -- WordPress import enhancements
-    field_mapping JSONB DEFAULT '{}'::jsonb,
-    wordpress_metadata JSONB DEFAULT '{}'::jsonb,
-    imported_user_ids JSONB DEFAULT '[]'::jsonb,
-    rollback_at TIMESTAMP WITH TIME ZONE,
-    rollback_by UUID, -- Will be updated with FK after users table exists
-
-    started_by UUID, -- Will be updated with FK after users table exists
-    started_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    completed_at TIMESTAMP WITH TIME ZONE
-);
-
 -- Users table
 CREATE TABLE IF NOT EXISTS users (
     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
@@ -127,7 +103,6 @@ CREATE TABLE IF NOT EXISTS users (
     password_hash VARCHAR NOT NULL,
     email_verified BOOLEAN NOT NULL DEFAULT FALSE,
     email_verification_token VARCHAR UNIQUE,
-    email_verification_expires TIMESTAMP WITH TIME ZONE,
 
     -- Personal Information
     first_name VARCHAR NOT NULL,
@@ -138,6 +113,7 @@ CREATE TABLE IF NOT EXISTS users (
     state VARCHAR(2),
     zipcode VARCHAR(10),
     date_of_birth DATE,
+    bio TEXT,
 
     -- Profile
     profile_photo_url VARCHAR,
@@ -161,44 +137,20 @@ CREATE TABLE IF NOT EXISTS users (
     -- Status & Role
     status userstatus NOT NULL DEFAULT 'pending_email',
     role userrole NOT NULL DEFAULT 'guest',
-    role_id UUID, -- For dynamic RBAC
+    role_id UUID, -- For dynamic RBAC (added in later migration)
 
-    -- Newsletter Preferences
-    newsletter_subscribed BOOLEAN DEFAULT TRUE,
-    newsletter_publish_name BOOLEAN DEFAULT FALSE NOT NULL,
-    newsletter_publish_photo BOOLEAN DEFAULT FALSE NOT NULL,
-    newsletter_publish_birthday BOOLEAN DEFAULT FALSE NOT NULL,
-    newsletter_publish_none BOOLEAN DEFAULT FALSE NOT NULL,
-
-    -- Volunteer Interests
-    volunteer_interests JSONB DEFAULT '[]'::jsonb,
-
-    -- Scholarship Request
-    scholarship_requested BOOLEAN DEFAULT FALSE NOT NULL,
-    scholarship_reason TEXT,
-
-    -- Directory Settings
-    show_in_directory BOOLEAN DEFAULT FALSE NOT NULL,
-    directory_email VARCHAR,
-    directory_bio TEXT,
-    directory_address VARCHAR,
-    directory_phone VARCHAR,
-    directory_dob DATE,
-    directory_partner_name VARCHAR,
-
-    -- Password Reset
-    password_reset_token VARCHAR,
-    password_reset_expires TIMESTAMP WITH TIME ZONE,
-    force_password_change BOOLEAN DEFAULT FALSE NOT NULL,
-
-    -- Terms of Service
-    accepts_tos BOOLEAN DEFAULT FALSE NOT NULL,
-    tos_accepted_at TIMESTAMP WITH TIME ZONE,
+    -- Rejection Tracking
+    rejection_reason TEXT,
+    rejected_at TIMESTAMP WITH TIME ZONE,
+    rejected_by UUID REFERENCES users(id),
 
     -- Membership
     member_since DATE,
+    accepts_tos BOOLEAN DEFAULT FALSE,
+    tos_accepted_at TIMESTAMP WITH TIME ZONE,
+    newsletter_subscribed BOOLEAN DEFAULT TRUE,
 
-    -- Reminder Tracking
+    -- Reminder Tracking (from migration 004)
     email_verification_reminders_sent INTEGER DEFAULT 0 NOT NULL,
     last_email_verification_reminder_at TIMESTAMP WITH TIME ZONE,
     event_attendance_reminders_sent INTEGER DEFAULT 0 NOT NULL,
@@ -208,21 +160,12 @@ CREATE TABLE IF NOT EXISTS users (
     renewal_reminders_sent INTEGER DEFAULT 0 NOT NULL,
     last_renewal_reminder_at TIMESTAMP WITH TIME ZONE,
 
-    -- Rejection Tracking
-    rejection_reason TEXT,
-    rejected_at TIMESTAMP WITH TIME ZONE,
-    rejected_by UUID REFERENCES users(id),
-
     -- WordPress Import Tracking
     import_source VARCHAR(50),
     import_job_id UUID REFERENCES import_jobs(id),
     wordpress_user_id BIGINT,
     wordpress_registered_date TIMESTAMP WITH TIME ZONE,
 
-    -- Role Change Audit Trail
-    role_changed_at TIMESTAMP WITH TIME ZONE,
-    role_changed_by UUID REFERENCES users(id) ON DELETE SET NULL,
-
     -- Timestamps
     created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
     updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
@@ -312,23 +255,11 @@ CREATE TABLE IF NOT EXISTS subscription_plans (
     name VARCHAR NOT NULL,
     description TEXT,
     price_cents INTEGER NOT NULL,
-    billing_cycle VARCHAR NOT NULL DEFAULT 'yearly',
-    stripe_price_id VARCHAR, -- Legacy, deprecated
+    billing_cycle VARCHAR NOT NULL DEFAULT 'annual',
 
     -- Configuration
     active BOOLEAN NOT NULL DEFAULT TRUE,
+    features JSONB DEFAULT '[]'::jsonb,
 
-    -- Custom billing cycle fields (for recurring date ranges like Jan 1 - Dec 31)
-    custom_cycle_enabled BOOLEAN DEFAULT FALSE NOT NULL,
-    custom_cycle_start_month INTEGER,
-    custom_cycle_start_day INTEGER,
-    custom_cycle_end_month INTEGER,
-    custom_cycle_end_day INTEGER,
-
-    -- Dynamic pricing fields
-    minimum_price_cents INTEGER DEFAULT 3000 NOT NULL,
-    suggested_price_cents INTEGER,
-    allow_donation BOOLEAN DEFAULT TRUE NOT NULL,
-
     -- Timestamps
     created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
@@ -350,21 +281,13 @@ CREATE TABLE IF NOT EXISTS subscriptions (
     status subscriptionstatus DEFAULT 'active',
     start_date TIMESTAMP WITH TIME ZONE NOT NULL,
     end_date TIMESTAMP WITH TIME ZONE,
+    next_billing_date TIMESTAMP WITH TIME ZONE,
 
     -- Payment Details
     amount_paid_cents INTEGER,
     base_subscription_cents INTEGER NOT NULL,
     donation_cents INTEGER DEFAULT 0 NOT NULL,
 
-    -- Stripe transaction metadata (for validation and audit)
-    stripe_payment_intent_id VARCHAR,
-    stripe_charge_id VARCHAR,
-    stripe_invoice_id VARCHAR,
-    payment_completed_at TIMESTAMP WITH TIME ZONE,
-    card_last4 VARCHAR(4),
-    card_brand VARCHAR(20),
-    stripe_receipt_url VARCHAR,
-
     -- Manual Payment Support
     manual_payment BOOLEAN DEFAULT FALSE NOT NULL,
     manual_payment_notes TEXT,
@@ -396,14 +319,6 @@ CREATE TABLE IF NOT EXISTS donations (
     stripe_payment_intent_id VARCHAR,
     payment_method VARCHAR,
 
-    -- Stripe transaction metadata (for validation and audit)
-    stripe_charge_id VARCHAR,
-    stripe_customer_id VARCHAR,
-    payment_completed_at TIMESTAMP WITH TIME ZONE,
-    card_last4 VARCHAR(4),
-    card_brand VARCHAR(20),
-    stripe_receipt_url VARCHAR,
-
     -- Metadata
     notes TEXT,
     created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
@@ -530,7 +445,7 @@ CREATE TABLE IF NOT EXISTS storage_usage (
     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
 
     total_bytes_used BIGINT NOT NULL DEFAULT 0,
-    max_bytes_allowed BIGINT NOT NULL DEFAULT 1073741824, -- 1GB
+    max_bytes_allowed BIGINT NOT NULL DEFAULT 10737418240, -- 10GB
     last_updated TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
 );
 
@@ -551,10 +466,29 @@ CREATE TABLE IF NOT EXISTS user_invitations (
     created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
 );
 
--- Add FK constraints to import_jobs (now that users table exists)
-ALTER TABLE import_jobs
-    ADD CONSTRAINT fk_import_jobs_rollback_by FOREIGN KEY (rollback_by) REFERENCES users(id),
-    ADD CONSTRAINT fk_import_jobs_started_by FOREIGN KEY (started_by) REFERENCES users(id);
+-- Import Jobs table
+CREATE TABLE IF NOT EXISTS import_jobs (
+    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+
+    filename VARCHAR NOT NULL,
+    status importjobstatus NOT NULL DEFAULT 'processing',
+    total_rows INTEGER DEFAULT 0,
+    processed_rows INTEGER DEFAULT 0,
+    success_count INTEGER DEFAULT 0,
+    error_count INTEGER DEFAULT 0,
+    error_log JSONB DEFAULT '[]'::jsonb,
+
+    -- WordPress import enhancements
+    field_mapping JSONB DEFAULT '{}'::jsonb,
+    wordpress_metadata JSONB DEFAULT '{}'::jsonb,
+    imported_user_ids JSONB DEFAULT '[]'::jsonb,
+    rollback_at TIMESTAMP WITH TIME ZONE,
+    rollback_by UUID REFERENCES users(id),
+
+    started_by UUID REFERENCES users(id),
+    started_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+    completed_at TIMESTAMP WITH TIME ZONE
+);
 
 -- Import Rollback Audit table (for tracking rollback operations)
 CREATE TABLE IF NOT EXISTS import_rollback_audit (
@@ -608,18 +542,12 @@ CREATE INDEX IF NOT EXISTS idx_subscriptions_user_id ON subscriptions(user_id);
 CREATE INDEX IF NOT EXISTS idx_subscriptions_plan_id ON subscriptions(plan_id);
 CREATE INDEX IF NOT EXISTS idx_subscriptions_status ON subscriptions(status);
 CREATE INDEX IF NOT EXISTS idx_subscriptions_stripe_subscription_id ON subscriptions(stripe_subscription_id);
-CREATE INDEX IF NOT EXISTS idx_subscriptions_payment_intent ON subscriptions(stripe_payment_intent_id);
-CREATE INDEX IF NOT EXISTS idx_subscriptions_charge_id ON subscriptions(stripe_charge_id);
-CREATE INDEX IF NOT EXISTS idx_subscriptions_invoice_id ON subscriptions(stripe_invoice_id);
 
 -- Donations indexes
 CREATE INDEX IF NOT EXISTS idx_donation_user ON donations(user_id);
 CREATE INDEX IF NOT EXISTS idx_donation_type ON donations(donation_type);
 CREATE INDEX IF NOT EXISTS idx_donation_status ON donations(status);
 CREATE INDEX IF NOT EXISTS idx_donation_created ON donations(created_at);
-CREATE INDEX IF NOT EXISTS idx_donation_payment_intent ON donations(stripe_payment_intent_id);
-CREATE INDEX IF NOT EXISTS idx_donation_charge_id ON donations(stripe_charge_id);
-CREATE INDEX IF NOT EXISTS idx_donation_customer_id ON donations(stripe_customer_id);
 
 -- Import Jobs indexes
 CREATE INDEX IF NOT EXISTS idx_import_jobs_status ON import_jobs(status);
@@ -659,7 +587,7 @@ INSERT INTO storage_usage (id, total_bytes_used, max_bytes_allowed, last_updated)
 SELECT
     gen_random_uuid(),
     0,
-    1073741824, -- 1GB
+    10737418240, -- 10GB
     CURRENT_TIMESTAMP
 WHERE NOT EXISTS (SELECT 1 FROM storage_usage);
117 models.py
@@ -44,13 +44,6 @@ class DonationStatus(enum.Enum):
     completed = "completed"
     failed = "failed"
 
 
-class PaymentMethodType(enum.Enum):
-    card = "card"
-    cash = "cash"
-    bank_transfer = "bank_transfer"
-    check = "check"
-
-
 class User(Base):
     __tablename__ = "users"
 
@@ -144,17 +137,6 @@ class User(Base):
     wordpress_user_id = Column(BigInteger, nullable=True, comment="Original WordPress user ID")
     wordpress_registered_date = Column(DateTime(timezone=True), nullable=True, comment="Original WordPress registration date")
 
-    # Role Change Audit Trail
-    role_changed_at = Column(DateTime(timezone=True), nullable=True, comment="Timestamp when role was last changed")
-    role_changed_by = Column(UUID(as_uuid=True), ForeignKey('users.id', ondelete='SET NULL'), nullable=True, comment="Admin who changed the role")
-
-    # Stripe Customer ID - Centralized for payment method management
-    stripe_customer_id = Column(String, nullable=True, index=True, comment="Stripe Customer ID for payment method management")
-
-    # Dynamic Registration Form - Custom field responses
-    custom_registration_data = Column(JSON, default=dict, nullable=False,
-                                      comment="Dynamic registration field responses stored as JSON for custom form fields")
-
     created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
     updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
 
@@ -163,53 +145,6 @@ class User(Base):
     events_created = relationship("Event", back_populates="creator")
     rsvps = relationship("EventRSVP", back_populates="user")
     subscriptions = relationship("Subscription", back_populates="user", foreign_keys="Subscription.user_id")
-    role_changer = relationship("User", foreign_keys=[role_changed_by], remote_side="User.id", post_update=True)
-    payment_methods = relationship("PaymentMethod", back_populates="user", foreign_keys="PaymentMethod.user_id")
-
-
-class PaymentMethod(Base):
-    """Stored payment methods for users (Stripe or manual records)"""
-    __tablename__ = "payment_methods"
-
-    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
-    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
-
-    # Stripe payment method reference
-    stripe_payment_method_id = Column(String, nullable=True, unique=True, index=True, comment="Stripe pm_xxx reference")
-
-    # Card details (stored for display purposes - PCI compliant)
-    card_brand = Column(String(20), nullable=True, comment="Card brand: visa, mastercard, amex, etc.")
-    card_last4 = Column(String(4), nullable=True, comment="Last 4 digits of card")
-    card_exp_month = Column(Integer, nullable=True, comment="Card expiration month")
-    card_exp_year = Column(Integer, nullable=True, comment="Card expiration year")
-    card_funding = Column(String(20), nullable=True, comment="Card funding type: credit, debit, prepaid")
-
-    # Payment type classification
-    payment_type = Column(SQLEnum(PaymentMethodType), default=PaymentMethodType.card, nullable=False)
-
-    # Status flags
-    is_default = Column(Boolean, default=False, nullable=False, comment="Whether this is the default payment method for auto-renewals")
-    is_active = Column(Boolean, default=True, nullable=False, comment="Soft delete flag - False means removed")
-    is_manual = Column(Boolean, default=False, nullable=False, comment="True for manually recorded methods (cash/check)")
-
-    # Manual payment notes (for cash/check records)
-    manual_notes = Column(Text, nullable=True, comment="Admin notes for manual payment methods")
-
-    # Audit trail
-    created_by = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True, comment="Admin who added this on behalf of user")
-    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False)
-    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)
-
-    # Relationships
-    user = relationship("User", back_populates="payment_methods", foreign_keys=[user_id])
-    creator = relationship("User", foreign_keys=[created_by])
-
-    # Composite index for efficient queries
-    __table_args__ = (
-        Index('idx_payment_method_user_default', 'user_id', 'is_default'),
-        Index('idx_payment_method_active', 'user_id', 'is_active'),
-    )
-
 
 class Event(Base):
     __tablename__ = "events"
 
@@ -298,15 +233,6 @@ class Subscription(Base):
     donation_cents = Column(Integer, default=0, nullable=False)  # Additional donation amount
     # Note: amount_paid_cents = base_subscription_cents + donation_cents
 
-    # Stripe transaction metadata (for validation and audit)
-    stripe_payment_intent_id = Column(String, nullable=True, index=True)  # Initial payment transaction ID
-    stripe_charge_id = Column(String, nullable=True, index=True)  # Actual charge reference
-    stripe_invoice_id = Column(String, nullable=True, index=True)  # Invoice reference
-    payment_completed_at = Column(DateTime(timezone=True), nullable=True)  # Exact payment timestamp from Stripe
-    card_last4 = Column(String(4), nullable=True)  # Last 4 digits of card
-    card_brand = Column(String(20), nullable=True)  # Visa, Mastercard, etc.
-    stripe_receipt_url = Column(String, nullable=True)  # Link to Stripe receipt
-
     # Manual payment fields
     manual_payment = Column(Boolean, default=False, nullable=False)  # Whether this was a manual offline payment
     manual_payment_notes = Column(Text, nullable=True)  # Admin notes about the payment
 
@@ -338,17 +264,9 @@ class Donation(Base):
 
     # Payment details
     stripe_checkout_session_id = Column(String, nullable=True)
-    stripe_payment_intent_id = Column(String, nullable=True, index=True)
+    stripe_payment_intent_id = Column(String, nullable=True)
     payment_method = Column(String, nullable=True)  # card, bank_transfer, etc.
 
-    # Stripe transaction metadata (for validation and audit)
-    stripe_charge_id = Column(String, nullable=True, index=True)  # Actual charge reference
-    stripe_customer_id = Column(String, nullable=True, index=True)  # Customer ID if created
-    payment_completed_at = Column(DateTime(timezone=True), nullable=True)  # Exact payment timestamp from Stripe
-    card_last4 = Column(String(4), nullable=True)  # Last 4 digits of card
-    card_brand = Column(String(20), nullable=True)  # Visa, Mastercard, etc.
-    stripe_receipt_url = Column(String, nullable=True)  # Link to Stripe receipt
-
     # Metadata
     notes = Column(Text, nullable=True)
     created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
 
@@ -591,36 +509,3 @@ class ImportRollbackAudit(Base):
     # Relationships
     import_job = relationship("ImportJob")
     admin_user = relationship("User", foreign_keys=[rolled_back_by])
-
-
-# ============================================================
-# System Settings Models
-# ============================================================
-
-class SettingType(enum.Enum):
-    plaintext = "plaintext"
-    encrypted = "encrypted"
-    json = "json"
-
-
-class SystemSettings(Base):
-    """System-wide configuration settings stored in database"""
-    __tablename__ = "system_settings"
-
-    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
-    setting_key = Column(String(100), unique=True, nullable=False, index=True)
-    setting_value = Column(Text, nullable=True)
-    setting_type = Column(SQLEnum(SettingType), default=SettingType.plaintext, nullable=False)
-    description = Column(Text, nullable=True)
-    updated_by = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True)
-    created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False)
-    updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)
-    is_sensitive = Column(Boolean, default=False, nullable=False)
-
-    # Relationships
-    updater = relationship("User", foreign_keys=[updated_by])
-
-    # Index on updated_at for audit queries
-    __table_args__ = (
-        Index('idx_system_settings_updated_at', 'updated_at'),
-    )
@@ -11,9 +11,11 @@ from datetime import datetime, timezone, timedelta
 # Load environment variables
 load_dotenv()
 
-# NOTE: Stripe credentials are now database-driven
-# These .env fallbacks are kept for backward compatibility only
-# The actual credentials are loaded dynamically from system_settings table
+# Initialize Stripe with secret key
+stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
+
+# Stripe webhook secret for signature verification
+STRIPE_WEBHOOK_SECRET = os.getenv("STRIPE_WEBHOOK_SECRET")
 
 def create_checkout_session(
     user_id: str,
@@ -21,15 +23,11 @@ def create_checkout_session(
     plan_id: str,
     stripe_price_id: str,
     success_url: str,
-    cancel_url: str,
-    db = None
+    cancel_url: str
 ):
     """
     Create a Stripe Checkout session for subscription payment.
 
-    Args:
-        db: Database session (optional, for reading Stripe credentials from database)
-
     Args:
         user_id: User's UUID
         user_email: User's email address
@@ -41,28 +39,6 @@ def create_checkout_session(
     Returns:
         dict: Checkout session object with session ID and URL
     """
-    # Load Stripe API key from database if available
-    if db:
-        try:
-            # Import here to avoid circular dependency
-            from models import SystemSettings, SettingType
-            from encryption_service import get_encryption_service
-
-            setting = db.query(SystemSettings).filter(
-                SystemSettings.setting_key == 'stripe_secret_key'
-            ).first()
-
-            if setting and setting.setting_value:
-                encryption_service = get_encryption_service()
-                stripe.api_key = encryption_service.decrypt(setting.setting_value)
-        except Exception as e:
-            # Fallback to .env if database read fails
-            print(f"Failed to read Stripe key from database: {e}")
-            stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
-    else:
-        # Fallback to .env if no db session
-        stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
-
     try:
         # Create Checkout Session
         checkout_session = stripe.checkout.Session.create(
@@ -98,14 +74,13 @@ def create_checkout_session(
         raise Exception(f"Stripe error: {str(e)}")
 
 
-def verify_webhook_signature(payload: bytes, sig_header: str, db=None) -> dict:
+def verify_webhook_signature(payload: bytes, sig_header: str) -> dict:
     """
     Verify Stripe webhook signature and construct event.
 
     Args:
         payload: Raw webhook payload bytes
         sig_header: Stripe signature header
-        db: Database session (optional, for reading webhook secret from database)
 
     Returns:
         dict: Verified webhook event
@@ -113,32 +88,9 @@ def verify_webhook_signature(payload: bytes, sig_header: str, db=None) -> dict:
     Raises:
         ValueError: If signature verification fails
     """
-    # Load webhook secret from database if available
-    webhook_secret = None
-    if db:
-        try:
-            from models import SystemSettings
-            from encryption_service import get_encryption_service
-
-            setting = db.query(SystemSettings).filter(
-                SystemSettings.setting_key == 'stripe_webhook_secret'
-            ).first()
-
-            if setting and setting.setting_value:
-                encryption_service = get_encryption_service()
-                webhook_secret = encryption_service.decrypt(setting.setting_value)
-        except Exception as e:
-            print(f"Failed to read webhook secret from database: {e}")
-            webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
-    else:
-        webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
-
-    if not webhook_secret:
-        raise ValueError("STRIPE_WEBHOOK_SECRET not configured")
-
     try:
         event = stripe.Webhook.construct_event(
-            payload, sig_header, webhook_secret
+            payload, sig_header, STRIPE_WEBHOOK_SECRET
        )
        return event
    except ValueError as e:
@@ -327,38 +327,6 @@ PERMISSIONS = [
         "module": "gallery"
     },
 
-    # ========== PAYMENT METHODS MODULE ==========
-    {
-        "code": "payment_methods.view",
-        "name": "View Payment Methods",
-        "description": "View user payment methods (masked)",
-        "module": "payment_methods"
-    },
-    {
-        "code": "payment_methods.view_sensitive",
-        "name": "View Sensitive Payment Details",
-        "description": "View full payment method details including Stripe IDs (requires password)",
-        "module": "payment_methods"
-    },
-    {
-        "code": "payment_methods.create",
-        "name": "Create Payment Methods",
-        "description": "Add payment methods on behalf of users",
-        "module": "payment_methods"
-    },
-    {
-        "code": "payment_methods.delete",
-        "name": "Delete Payment Methods",
-        "description": "Delete user payment methods",
-        "module": "payment_methods"
-    },
-    {
-        "code": "payment_methods.set_default",
-        "name": "Set Default Payment Method",
-        "description": "Set a user's default payment method",
-        "module": "payment_methods"
-    },
-
     # ========== SETTINGS MODULE ==========
     {
         "code": "settings.view",
@@ -485,10 +453,6 @@ DEFAULT_ROLE_PERMISSIONS = {
         "gallery.edit",
         "gallery.delete",
         "gallery.moderate",
-        "payment_methods.view",
-        "payment_methods.create",
-        "payment_methods.delete",
-        "payment_methods.set_default",
         "settings.view",
         "settings.edit",
         "settings.email_templates",
@@ -496,36 +460,6 @@ DEFAULT_ROLE_PERMISSIONS = {
         "settings.logs",
     ],
 
-    UserRole.finance: [
-        # Finance role has all admin permissions plus sensitive payment access
-        "users.view",
-        "users.export",
-        "events.view",
-        "events.rsvps",
-        "events.calendar_export",
-        "subscriptions.view",
-        "subscriptions.create",
-        "subscriptions.edit",
-        "subscriptions.cancel",
-        "subscriptions.activate",
-        "subscriptions.plans",
-        "financials.view",
-        "financials.create",
-        "financials.edit",
-        "financials.delete",
-        "financials.export",
-        "financials.payments",
-        "newsletters.view",
-        "bylaws.view",
-        "gallery.view",
-        "payment_methods.view",
-        "payment_methods.view_sensitive",  # Finance can view sensitive payment details
-        "payment_methods.create",
-        "payment_methods.delete",
-        "payment_methods.set_default",
-        "settings.view",
-    ],
-
     # Superadmin gets all permissions automatically in code,
     # so we don't need to explicitly assign them
     UserRole.superadmin: []
144 r2_storage.py
@@ -35,29 +35,6 @@ class R2Storage:
         'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': ['.xlsx']
     }
-
-    # Branding assets (logo and favicon)
-    ALLOWED_BRANDING_TYPES = {
-        'image/jpeg': ['.jpg', '.jpeg'],
-        'image/png': ['.png'],
-        'image/webp': ['.webp'],
-        'image/svg+xml': ['.svg']
-    }
-
-    ALLOWED_FAVICON_TYPES = {
-        'image/x-icon': ['.ico'],
-        'image/vnd.microsoft.icon': ['.ico'],
-        'image/png': ['.png'],
-        'image/svg+xml': ['.svg']
-    }
-
-    # CSV files for imports
-    ALLOWED_CSV_TYPES = {
-        'text/csv': ['.csv'],
-        'text/plain': ['.csv'],  # Some systems report CSV as text/plain
-        'application/csv': ['.csv'],
-        'application/vnd.ms-excel': ['.csv'],  # Old Excel type sometimes used for CSV
-    }

     def __init__(self):
         """Initialize R2 client with credentials from environment"""
         self.account_id = os.getenv('R2_ACCOUNT_ID')
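The removed ALLOWED_CSV_TYPES map pairs several MIME types with the .csv extension because browsers and operating systems disagree about how to label CSV uploads. A minimal sketch of the validation such a map supports (is_allowed_csv is illustrative; the class's actual validation method is not shown in this diff):

    from pathlib import Path

    ALLOWED_CSV_TYPES = {
        'text/csv': ['.csv'],
        'text/plain': ['.csv'],                # some systems report CSV as text/plain
        'application/csv': ['.csv'],
        'application/vnd.ms-excel': ['.csv'],  # legacy Excel type sometimes used for CSV
    }

    def is_allowed_csv(filename: str, content_type: str) -> bool:
        # Accept only when the MIME type is known and the extension matches it.
        allowed_exts = ALLOWED_CSV_TYPES.get(content_type)
        return allowed_exts is not None and Path(filename).suffix.lower() in allowed_exts

    assert is_allowed_csv("members.csv", "text/plain")
    assert not is_allowed_csv("members.xls", "application/vnd.ms-excel")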
@@ -248,127 +225,6 @@ class R2Storage:
         except ClientError:
             return False
-
-    async def upload_bytes(
-        self,
-        content: bytes,
-        folder: str,
-        filename: str,
-        content_type: str = 'text/csv'
-    ) -> tuple[str, str, int]:
-        """
-        Upload raw bytes to R2 storage (useful for CSV imports)
-
-        Args:
-            content: Raw bytes to upload
-            folder: Folder path in R2 (e.g., 'imports/job-id')
-            filename: Original filename
-            content_type: MIME type of the content
-
-        Returns:
-            tuple: (public_url, object_key, file_size_bytes)
-
-        Raises:
-            HTTPException: If upload fails
-        """
-        try:
-            file_size = len(content)
-
-            # Generate unique filename preserving original extension
-            file_extension = Path(filename).suffix.lower() or '.csv'
-            unique_filename = f"{uuid.uuid4()}{file_extension}"
-            object_key = f"{folder}/{unique_filename}"
-
-            # Upload to R2
-            self.client.put_object(
-                Bucket=self.bucket_name,
-                Key=object_key,
-                Body=content,
-                ContentType=content_type,
-                ContentLength=file_size
-            )
-
-            # Generate public URL
-            public_url = self.get_public_url(object_key)
-
-            return public_url, object_key, file_size
-
-        except ClientError as e:
-            raise HTTPException(
-                status_code=500,
-                detail=f"Failed to upload to R2: {str(e)}"
-            )
-        except Exception as e:
-            raise HTTPException(
-                status_code=500,
-                detail=f"Upload error: {str(e)}"
-            )
-
-    async def download_file(self, object_key: str) -> bytes:
-        """
-        Download a file from R2 storage
-
-        Args:
-            object_key: The S3 object key (path) of the file
-
-        Returns:
-            bytes: File content
-
-        Raises:
-            HTTPException: If download fails
-        """
-        try:
-            response = self.client.get_object(
-                Bucket=self.bucket_name,
-                Key=object_key
-            )
-            return response['Body'].read()
-
-        except ClientError as e:
-            if e.response['Error']['Code'] == 'NoSuchKey':
-                raise HTTPException(status_code=404, detail="File not found in storage")
-            raise HTTPException(
-                status_code=500,
-                detail=f"Failed to download file from R2: {str(e)}"
-            )
-        except Exception as e:
-            raise HTTPException(
-                status_code=500,
-                detail=f"Download error: {str(e)}"
-            )
-
-    async def delete_multiple(self, object_keys: list[str]) -> bool:
-        """
-        Delete multiple files from R2 storage
-
-        Args:
-            object_keys: List of S3 object keys to delete
-
-        Returns:
-            bool: True if successful
-
-        Raises:
-            HTTPException: If deletion fails
-        """
-        if not object_keys:
-            return True
-
-        try:
-            # R2/S3 delete_objects accepts up to 1000 keys at once
-            objects = [{'Key': key} for key in object_keys if key]
-
-            if objects:
-                self.client.delete_objects(
-                    Bucket=self.bucket_name,
-                    Delete={'Objects': objects}
-                )
-            return True
-
-        except ClientError as e:
-            raise HTTPException(
-                status_code=500,
-                detail=f"Failed to delete files from R2: {str(e)}"
-            )
 
 
 # Singleton instance
 _r2_storage = None
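Taken together, upload_bytes and download_file gave the CSV import pipeline a staging round trip through R2: upload the raw bytes once, then re-read them by object key from any worker. A rough usage sketch, assuming an initialized R2Storage instance with the signatures shown above (it will not run without real R2 credentials):

    import asyncio

    async def stage_import_file(storage, job_id: str, filename: str, raw: bytes) -> bytes:
        # Upload the CSV for this import job...
        public_url, object_key, size = await storage.upload_bytes(
            content=raw,
            folder=f"imports/{job_id}",
            filename=filename,
            content_type="text/csv",
        )
        # ...then fetch the staged bytes back, possibly from another process.
        return await storage.download_file(object_key)

    # asyncio.run(stage_import_file(storage, "job-123", "users.csv", b"email\nx@y.org\n"))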

@@ -2,7 +2,7 @@
 """
 Permission Seeding Script for Dynamic RBAC System
 
-This script populates the database with 65 granular permissions and assigns them
+This script populates the database with 59 granular permissions and assigns them
 to the appropriate dynamic roles (not the old enum roles).
 
 Usage:
@@ -33,7 +33,7 @@ engine = create_engine(DATABASE_URL)
 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
 
 # ============================================================
-# Permission Definitions (65 permissions across 11 modules)
+# Permission Definitions (59 permissions across 10 modules)
 # ============================================================
 
 PERMISSIONS = [
@@ -116,55 +116,6 @@ PERMISSIONS = [
     {"code": "permissions.assign", "name": "Assign Permissions", "description": "Assign permissions to roles", "module": "permissions"},
     {"code": "permissions.manage_roles", "name": "Manage Roles", "description": "Create and manage user roles", "module": "permissions"},
     {"code": "permissions.audit", "name": "View Permission Audit Log", "description": "View permission change audit logs", "module": "permissions"},
-
-    # ========== PAYMENT METHODS MODULE (5) ==========
-    {"code": "payment_methods.view", "name": "View Payment Methods", "description": "View user payment methods (masked)", "module": "payment_methods"},
-    {"code": "payment_methods.view_sensitive", "name": "View Sensitive Payment Details", "description": "View full Stripe payment method IDs (requires password)", "module": "payment_methods"},
-    {"code": "payment_methods.create", "name": "Create Payment Methods", "description": "Add payment methods on behalf of users", "module": "payment_methods"},
-    {"code": "payment_methods.delete", "name": "Delete Payment Methods", "description": "Remove user payment methods", "module": "payment_methods"},
-    {"code": "payment_methods.set_default", "name": "Set Default Payment Method", "description": "Set default payment method for users", "module": "payment_methods"},
-
-    # ========== REGISTRATION MODULE (2) ==========
-    {"code": "registration.view", "name": "View Registration Settings", "description": "View registration form schema and settings", "module": "registration"},
-    {"code": "registration.manage", "name": "Manage Registration Form", "description": "Edit registration form schema, steps, and fields", "module": "registration"},
-
-    # ========== DIRECTORY MODULE (2) ==========
-    {"code": "directory.view", "name": "View Directory Settings", "description": "View member directory field configuration", "module": "directory"},
-    {"code": "directory.manage", "name": "Manage Directory Fields", "description": "Enable/disable directory fields shown in Profile and Directory pages", "module": "directory"},
-]
-
-# Default system roles that must exist
-DEFAULT_ROLES = [
-    {
-        "code": "guest",
-        "name": "Guest",
-        "description": "Default role for new registrations with no special permissions",
-        "is_system_role": True
-    },
-    {
-        "code": "member",
-        "name": "Member",
-        "description": "Active paying members with access to member-only content",
-        "is_system_role": True
-    },
-    {
-        "code": "finance",
-        "name": "Finance",
-        "description": "Financial management role with access to payments, subscriptions, and reports",
-        "is_system_role": True
-    },
-    {
-        "code": "admin",
-        "name": "Admin",
-        "description": "Board members with full management access except RBAC",
-        "is_system_role": True
-    },
-    {
-        "code": "superadmin",
-        "name": "Superadmin",
-        "description": "Full system access including RBAC management",
-        "is_system_role": True
-    },
 ]
 
 # Default permission assignments for dynamic roles
@@ -185,9 +136,6 @@ DEFAULT_ROLE_PERMISSIONS = {
         "subscriptions.cancel", "subscriptions.activate", "subscriptions.plans",
         "subscriptions.export",
         "donations.view", "donations.export",
-        # Payment methods - finance can view sensitive details
-        "payment_methods.view", "payment_methods.view_sensitive",
-        "payment_methods.create", "payment_methods.delete", "payment_methods.set_default",
     ],
 
     "admin": [
@@ -209,13 +157,6 @@ DEFAULT_ROLE_PERMISSIONS = {
         "gallery.view", "gallery.upload", "gallery.edit", "gallery.delete", "gallery.moderate",
         "settings.view", "settings.edit", "settings.email_templates", "settings.storage",
         "settings.logs",
-        # Payment methods - admin can manage but not view sensitive details
-        "payment_methods.view", "payment_methods.create",
-        "payment_methods.delete", "payment_methods.set_default",
-        # Registration form management
-        "registration.view", "registration.manage",
-        # Directory configuration
-        "directory.view", "directory.manage",
     ],
 
     "superadmin": [
@@ -255,34 +196,7 @@ def seed_permissions():
         print(f"\n⚠️ WARNING: Tables not fully cleared! Stopping.")
         return
 
-    # Step 2: Create default system roles
-    print(f"\n👤 Creating {len(DEFAULT_ROLES)} system roles...")
-    role_map = {}
-
-    for role_data in DEFAULT_ROLES:
-        # Check if role already exists
-        existing_role = db.query(Role).filter(Role.code == role_data["code"]).first()
-        if existing_role:
-            print(f"  • {role_data['name']}: Already exists, updating...")
-            existing_role.name = role_data["name"]
-            existing_role.description = role_data["description"]
-            existing_role.is_system_role = role_data["is_system_role"]
-            role_map[role_data["code"]] = existing_role
-        else:
-            print(f"  • {role_data['name']}: Creating...")
-            role = Role(
-                code=role_data["code"],
-                name=role_data["name"],
-                description=role_data["description"],
-                is_system_role=role_data["is_system_role"]
-            )
-            db.add(role)
-            role_map[role_data["code"]] = role
-
-    db.commit()
-    print(f"✓ Created/updated {len(DEFAULT_ROLES)} system roles")
-
-    # Step 3: Create permissions
+    # Step 2: Create permissions
     print(f"\n📝 Creating {len(PERMISSIONS)} permissions...")
     permission_map = {}  # Map code to permission object
 
@@ -299,13 +213,13 @@ def seed_permissions():
     db.commit()
     print(f"✓ Created {len(PERMISSIONS)} permissions")
 
-    # Step 4: Verify roles exist
-    print("\n🔍 Verifying dynamic roles...")
+    # Step 3: Get all roles from database
+    print("\n🔍 Fetching dynamic roles...")
     roles = db.query(Role).all()
     role_map = {role.code: role for role in roles}
     print(f"✓ Found {len(roles)} roles: {', '.join(role_map.keys())}")
 
-    # Step 5: Assign permissions to roles
+    # Step 4: Assign permissions to roles
     print("\n🔐 Assigning permissions to roles...")
 
     from models import UserRole  # Import for enum mapping
@@ -344,7 +258,7 @@ def seed_permissions():
         db.commit()
         print(f"  ✓ {role.name}: Assigned {len(permission_codes)} permissions")
 
-    # Step 6: Summary
+    # Step 5: Summary
     print("\n" + "=" * 80)
     print("📊 SEEDING SUMMARY")
     print("=" * 80)
@@ -359,8 +273,7 @@ def seed_permissions():
     for module, count in sorted(modules.items()):
         print(f"  • {module.capitalize()}: {count} permissions")
 
-    print(f"\nTotal system roles created: {len(DEFAULT_ROLES)}")
-    print(f"Total permissions created: {len(PERMISSIONS)}")
+    print(f"\nTotal permissions created: {len(PERMISSIONS)}")
     print(f"Total role-permission mappings: {total_assigned}")
     print("\n✅ Permission seeding completed successfully!")
     print("\nNext step: Restart backend server")
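With role creation gone, the script now assumes the five system roles already exist and only wires permissions to them. A condensed, self-contained sketch of that assignment phase (plain classes stand in for the SQLAlchemy models, and the role.permissions relationship is an assumption inferred from the script's prints):

    class SketchRole:
        def __init__(self, code):
            self.code = code
            self.permissions = []

    def assign_role_permissions(role_map, permission_map, defaults):
        # Step 4 in miniature: attach known permission objects to each existing role.
        total = 0
        for role_code, codes in defaults.items():
            role = role_map.get(role_code)
            if role is None:
                continue  # roles are no longer created here; they must pre-exist
            role.permissions = [permission_map[c] for c in codes if c in permission_map]
            total += len(role.permissions)
        return total

    roles = {"member": SketchRole("member")}
    perms = {"events.view": object()}
    assert assign_role_permissions(roles, perms, {"member": ["events.view", "ghost.perm"]}) == 1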

@@ -10,127 +10,21 @@ Key Features:
 - Validate and standardize user data (DOB, phone numbers)
 - Generate smart status suggestions based on approval and subscription data
 - Comprehensive data quality analysis and error reporting
-- Multi-file import support (Users, Members, Payments CSVs)
-- Field mapping based on Meta Name Reference document
 
 Author: Claude Code
 Date: 2025-12-24
-Updated: 2026-02-03 - Added comprehensive multi-file import support
 """
 
 import csv
 import re
 import logging
 from datetime import datetime
-from typing import Dict, List, Optional, Tuple, Any
+from typing import Dict, List, Optional, Tuple
 import phpserialize
-import pandas as pd
 
 logger = logging.getLogger(__name__)
 
 
-# ============================================================================
-# Meta Name Reference Field Mapping (from client's WordPress export)
-# ============================================================================
-
-# Maps WordPress meta names to our database fields
-# Format: 'wordpress_meta_name': ('db_field', 'field_type', 'parser_function')
-META_FIELD_MAPPING = {
-    # Basic user info
-    'first_name': ('first_name', 'string', None),
-    'last_name': ('last_name', 'string', None),
-    'user_email': ('email', 'string', 'lowercase'),
-    'user_login': ('username', 'string', None),  # For reference only
-    'address': ('address', 'string', None),
-    'city': ('city', 'string', None),
-    'state': ('state', 'string', None),
-    'zipcode': ('zipcode', 'string', None),
-    'cell_phone': ('phone', 'string', 'phone'),
-    'date_of_birth': ('date_of_birth', 'date', 'date_mmddyyyy'),
-
-    # Partner info
-    'partner_first_name': ('partner_first_name', 'string', None),
-    'partner_last_name': ('partner_last_name', 'string', None),
-    'partner_membership_status': ('partner_is_member', 'boolean', 'yes_no'),
-    'partner_membership_consideration': ('partner_plan_to_become_member', 'boolean', 'yes_no'),
-
-    # Newsletter preferences
-    'newsletter_consent': ('newsletter_subscribed', 'boolean', 'yes_no'),
-    'newsletter_checklist': ('newsletter_preferences', 'multi_value', 'newsletter_checklist'),
-
-    # Referral and lead sources
-    'member_referral': ('referred_by_member_name', 'string', None),
-    'referral_source': ('lead_sources', 'multi_value', 'lead_sources'),
-
-    # Volunteer interests
-    'volunteer_checklist': ('volunteer_interests', 'multi_value', 'volunteer_checklist'),
-
-    # Scholarship
-    'scholarship_request': ('scholarship_requested', 'boolean', 'yes_no'),
-    'scholarship_reason': ('scholarship_reason', 'string', None),
-
-    # Directory settings
-    'members_directory_filter': ('show_in_directory', 'boolean', 'yes_no'),
-    'md_display_name': ('custom_registration_data.directory_display_name', 'custom', None),
-    'md_email': ('directory_email', 'string', None),
-    'description': ('directory_bio', 'string', None),
-    'md_adress': ('directory_address', 'string', None),  # Note: typo in WordPress
-    'md_phone': ('directory_phone', 'string', None),
-    'md_dob': ('directory_dob', 'date', 'date_mmddyyyy'),
-    'md_partner_name': ('directory_partner_name', 'string', None),
-    'md_avatar': ('profile_photo_url', 'string', None),
-
-    # Metadata
-    'member_since': ('member_since', 'date', 'date_various'),
-    'user_registered': ('wordpress_registered_date', 'datetime', 'datetime_mysql'),
-    'ID': ('wordpress_user_id', 'integer', None),
-
-    # Stripe info (from WordPress)
-    'pms_stripe_customer_id': ('stripe_customer_id', 'string', None),
-}
-
-# Newsletter checklist option mapping
-NEWSLETTER_CHECKLIST_OPTIONS = {
-    'name': 'newsletter_publish_name',
-    'photo': 'newsletter_publish_photo',
-    'birthday': 'newsletter_publish_birthday',
-    'none': 'newsletter_publish_none',
-    # Handle various WordPress stored formats
-    'my name': 'newsletter_publish_name',
-    'my photo': 'newsletter_publish_photo',
-    'my birthday': 'newsletter_publish_birthday',
-}
-
-# Volunteer interests mapping (WordPress values to our format)
-VOLUNTEER_INTERESTS_MAP = {
-    'events': 'Events',
-    'fundraising': 'Fundraising',
-    'communications': 'Communications',
-    'membership': 'Membership',
-    'board': 'Board of Directors',
-    'other': 'Other',
-    # Handle various WordPress formats
-    'help with events': 'Events',
-    'help with fundraising': 'Fundraising',
-    'help with communications': 'Communications',
-    'help with membership': 'Membership',
-    'serve on the board': 'Board of Directors',
-}
-
-# Lead sources mapping
-LEAD_SOURCES_MAP = {
-    'current member': 'Current member',
-    'friend': 'Friend',
-    'outsmart magazine': 'OutSmart Magazine',
-    'outsmart': 'OutSmart Magazine',
-    'search engine': 'Search engine (Google etc.)',
-    'google': 'Search engine (Google etc.)',
-    'known about loaf': "I've known about LOAF for a long time",
-    'long time': "I've known about LOAF for a long time",
-    'other': 'Other',
-}
-
-
 # ============================================================================
 # WordPress Role Mapping Configuration
 # ============================================================================
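Each entry in the removed META_FIELD_MAPPING is a (db_field, field_type, parser) triple, and the transform step (removed further down) dispatches on the middle element. A minimal sketch of that dispatch with just two parsers wired up:

    from datetime import datetime

    MAPPING = {
        'user_email': ('email', 'string', 'lowercase'),
        'date_of_birth': ('date_of_birth', 'date', 'date_mmddyyyy'),
    }

    def apply_mapping(row: dict) -> dict:
        out = {}
        for csv_field, (db_field, field_type, parser) in MAPPING.items():
            value = row.get(csv_field)
            if value is None:
                continue
            if field_type == 'string' and parser == 'lowercase':
                out[db_field] = str(value).strip().lower()
            elif field_type == 'date' and parser == 'date_mmddyyyy':
                out[db_field] = datetime.strptime(str(value).strip(), '%m/%d/%Y')
        return out

    assert apply_mapping({'user_email': ' Jane@Example.ORG '}) == {'email': 'jane@example.org'}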
@@ -389,622 +283,6 @@ def validate_dob(dob_str: str) -> Tuple[Optional[datetime], Optional[str]]:
     return None, f'Invalid date format: {dob_str} (expected MM/DD/YYYY)'
 
 
-# ============================================================================
-# Enhanced Field Parsers for Meta Name Reference
-# ============================================================================
-
-def parse_boolean_yes_no(value: Any) -> bool:
-    """
-    Parse yes/no style boolean values from WordPress.
-
-    Handles: yes, no, true, false, 1, 0, checked, unchecked
-    """
-    if value is None or (isinstance(value, float) and pd.isna(value)):
-        return False
-
-    str_val = str(value).lower().strip()
-    return str_val in ('yes', 'true', '1', 'checked', 'on', 'y')
-
-
-def parse_date_various(date_str: Any) -> Optional[datetime]:
-    """
-    Parse dates in various formats commonly found in WordPress exports.
-
-    Handles:
-    - MM/DD/YYYY (US format)
-    - YYYY-MM-DD (ISO format)
-    - DD/MM/YYYY (EU format - attempted if US fails)
-    - Month DD, YYYY (e.g., "January 15, 2020")
-    """
-    if date_str is None or (isinstance(date_str, float) and pd.isna(date_str)):
-        return None
-
-    date_str = str(date_str).strip()
-    if not date_str or date_str.lower() == 'nan':
-        return None
-
-    # Try various formats
-    formats = [
-        '%m/%d/%Y',           # US: 01/15/2020
-        '%Y-%m-%d',           # ISO: 2020-01-15
-        '%d/%m/%Y',           # EU: 15/01/2020
-        '%B %d, %Y',          # Full: January 15, 2020
-        '%b %d, %Y',          # Short: Jan 15, 2020
-        '%Y-%m-%d %H:%M:%S',  # MySQL datetime
-        '%m/%Y',              # Month/Year: 01/2020
-        '%m-%Y',              # Month-Year: 01-2020
-        '%b-%Y',              # Short month-Year: Jan-2020
-        '%B-%Y',              # Full month-Year: January-2020
-    ]
-
-    for fmt in formats:
-        try:
-            parsed = datetime.strptime(date_str, fmt)
-            # Validate year range
-            if 1900 <= parsed.year <= datetime.now().year + 1:
-                return parsed
-        except ValueError:
-            continue
-
-    # Only log warning for strings that look like dates
-    if date_str and len(date_str) > 3:
-        logger.debug(f"Could not parse date: {date_str}")
-    return None
-
-
-def parse_datetime_mysql(dt_str: Any) -> Optional[datetime]:
-    """Parse MySQL datetime format: YYYY-MM-DD HH:MM:SS"""
-    if dt_str is None or (isinstance(dt_str, float) and pd.isna(dt_str)):
-        return None
-
-    try:
-        return datetime.strptime(str(dt_str).strip(), '%Y-%m-%d %H:%M:%S')
-    except ValueError:
-        return parse_date_various(dt_str)
-
-
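Since parse_date_various tries formats in order and rejects implausible years, its behavior is easiest to pin down with examples. A trimmed re-implementation of the same loop, with the expected results:

    from datetime import datetime

    def parse(date_str):
        for fmt in ('%m/%d/%Y', '%Y-%m-%d', '%B %d, %Y', '%b-%Y'):
            try:
                parsed = datetime.strptime(date_str.strip(), fmt)
                if 1900 <= parsed.year <= datetime.now().year + 1:
                    return parsed
            except ValueError:
                continue
        return None

    assert parse('01/15/2020') == datetime(2020, 1, 15)
    assert parse('Jan-2020') == datetime(2020, 1, 1)  # month/year only: day defaults to 1
    assert parse('01/15/3020') is None                # parses, but fails the year-range check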
-def parse_newsletter_checklist(value: Any) -> Dict[str, bool]:
-    """
-    Parse newsletter checklist multi-value field.
-
-    WordPress stores this as comma-separated or PHP serialized values.
-    Returns dict mapping to our newsletter_publish_* fields.
-    """
-    result = {
-        'newsletter_publish_name': False,
-        'newsletter_publish_photo': False,
-        'newsletter_publish_birthday': False,
-        'newsletter_publish_none': False,
-    }
-
-    if value is None or (isinstance(value, float) and pd.isna(value)):
-        return result
-
-    str_val = str(value).lower().strip()
-    if not str_val or str_val == 'nan':
-        return result
-
-    # Try PHP serialized first
-    if str_val.startswith('a:'):
-        try:
-            parsed = phpserialize.loads(str_val.encode('utf-8'))
-            if isinstance(parsed, dict):
-                for key in parsed.keys():
-                    key_str = key.decode('utf-8') if isinstance(key, bytes) else str(key)
-                    key_lower = key_str.lower()
-                    for match_key, field in NEWSLETTER_CHECKLIST_OPTIONS.items():
-                        if match_key in key_lower:
-                            result[field] = True
-                return result
-        except Exception:
-            pass
-
-    # Try comma-separated values
-    items = [item.strip().lower() for item in str_val.split(',')]
-    for item in items:
-        for match_key, field in NEWSLETTER_CHECKLIST_OPTIONS.items():
-            if match_key in item:
-                result[field] = True
-
-    return result
-
-
-def parse_volunteer_checklist(value: Any) -> List[str]:
-    """
-    Parse volunteer interests checklist.
-
-    Returns list of standardized volunteer interest labels.
-    """
-    if value is None or (isinstance(value, float) and pd.isna(value)):
-        return []
-
-    str_val = str(value).lower().strip()
-    if not str_val or str_val == 'nan':
-        return []
-
-    interests = []
-
-    # Try PHP serialized first
-    if str_val.startswith('a:'):
-        try:
-            parsed = phpserialize.loads(str_val.encode('utf-8'))
-            if isinstance(parsed, dict):
-                for key in parsed.keys():
-                    key_str = key.decode('utf-8') if isinstance(key, bytes) else str(key)
-                    key_lower = key_str.lower()
-                    for match_key, label in VOLUNTEER_INTERESTS_MAP.items():
-                        if match_key in key_lower and label not in interests:
-                            interests.append(label)
-                return interests
-        except Exception:
-            pass
-
-    # Try comma-separated values
-    items = [item.strip().lower() for item in str_val.split(',')]
-    for item in items:
-        for match_key, label in VOLUNTEER_INTERESTS_MAP.items():
-            if match_key in item and label not in interests:
-                interests.append(label)
-
-    return interests
-
-
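All three checklist parsers share one trick: any value starting with "a:" is a PHP-serialized array, which phpserialize.loads decodes into a dict keyed by bytes. A small sketch of that decode step (the serialized sample is illustrative, not taken from a real export):

    import phpserialize

    raw = 'a:2:{s:7:"my name";s:3:"yes";s:8:"my photo";s:3:"yes";}'
    parsed = phpserialize.loads(raw.encode('utf-8'))
    # Keys come back as bytes and must be decoded before substring matching:
    keys = [k.decode('utf-8') if isinstance(k, bytes) else str(k) for k in parsed.keys()]
    assert 'my name' in keys and 'my photo' in keys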
-def parse_lead_sources(value: Any) -> List[str]:
-    """
-    Parse referral/lead sources field.
-
-    Returns list of standardized lead source labels.
-    """
-    if value is None or (isinstance(value, float) and pd.isna(value)):
-        return []
-
-    str_val = str(value).lower().strip()
-    if not str_val or str_val == 'nan':
-        return []
-
-    sources = []
-
-    # Try PHP serialized first
-    if str_val.startswith('a:'):
-        try:
-            parsed = phpserialize.loads(str_val.encode('utf-8'))
-            if isinstance(parsed, dict):
-                for key in parsed.keys():
-                    key_str = key.decode('utf-8') if isinstance(key, bytes) else str(key)
-                    key_lower = key_str.lower()
-                    for match_key, label in LEAD_SOURCES_MAP.items():
-                        if match_key in key_lower and label not in sources:
-                            sources.append(label)
-                return sources
-        except Exception:
-            pass
-
-    # Try comma-separated values
-    items = [item.strip().lower() for item in str_val.split(',')]
-    for item in items:
-        matched = False
-        for match_key, label in LEAD_SOURCES_MAP.items():
-            if match_key in item and label not in sources:
-                sources.append(label)
-                matched = True
-                break
-        # If no match, add as "Other" with original value
-        if not matched and item:
-            sources.append('Other')
-
-    return sources
-
-
-def transform_csv_row_to_user_data(row: Dict[str, Any], existing_emails: set = None) -> Dict[str, Any]:
-    """
-    Transform a CSV row to user data dictionary using Meta Name Reference mapping.
-
-    Args:
-        row: Dictionary of CSV column values
-        existing_emails: Set of emails already in database (for duplicate check)
-
-    Returns:
-        Dictionary with:
-        - user_data: Fields that map to User model
-        - custom_data: Fields for custom_registration_data JSON
-        - newsletter_prefs: Newsletter preference booleans
-        - warnings: List of warning messages
-        - errors: List of error messages
-    """
-    user_data = {}
-    custom_data = {}
-    newsletter_prefs = {}
-    warnings = []
-    errors = []
-
-    # Process each mapped field
-    for csv_field, (db_field, field_type, parser) in META_FIELD_MAPPING.items():
-        value = row.get(csv_field)
-
-        # Skip if no value
-        if value is None or (isinstance(value, float) and pd.isna(value)):
-            continue
-
-        try:
-            # Parse based on field type
-            if field_type == 'string':
-                if parser == 'lowercase':
-                    parsed_value = str(value).strip().lower()
-                elif parser == 'phone':
-                    parsed_value = standardize_phone(value)
-                    if parsed_value == '0000000000':
-                        warnings.append(f'Invalid phone: {value}')
-                else:
-                    parsed_value = str(value).strip() if value else None
-
-            elif field_type == 'integer':
-                parsed_value = int(value) if value else None
-
-            elif field_type == 'boolean':
-                parsed_value = parse_boolean_yes_no(value)
-
-            elif field_type == 'date':
-                if parser == 'date_mmddyyyy':
-                    parsed_value, warning = validate_dob(value)
-                    if warning:
-                        warnings.append(warning)
-                else:
-                    parsed_value = parse_date_various(value)
-
-            elif field_type == 'datetime':
-                parsed_value = parse_datetime_mysql(value)
-
-            elif field_type == 'multi_value':
-                if parser == 'newsletter_checklist':
-                    newsletter_prefs = parse_newsletter_checklist(value)
-                    continue  # Handled separately
-                elif parser == 'volunteer_checklist':
-                    parsed_value = parse_volunteer_checklist(value)
-                elif parser == 'lead_sources':
-                    parsed_value = parse_lead_sources(value)
-                else:
-                    parsed_value = [str(value)]
-
-            elif field_type == 'custom':
-                # Store in custom_registration_data
-                custom_field = db_field.replace('custom_registration_data.', '')
-                custom_data[custom_field] = str(value).strip() if value else None
-                continue
-
-            else:
-                parsed_value = value
-
-            # Store in appropriate location
-            if parsed_value is not None:
-                user_data[db_field] = parsed_value
-
-        except Exception as e:
-            warnings.append(f'Error parsing {csv_field}: {str(e)}')
-
-    # Check for required fields
-    if not user_data.get('email'):
-        errors.append('Missing email address')
-    elif existing_emails and user_data['email'] in existing_emails:
-        errors.append('Email already exists in database')
-
-    if not user_data.get('first_name'):
-        warnings.append('Missing first name')
-
-    if not user_data.get('last_name'):
-        warnings.append('Missing last name')
-
-    return {
-        'user_data': user_data,
-        'custom_data': custom_data,
-        'newsletter_prefs': newsletter_prefs,
-        'warnings': warnings,
-        'errors': errors
-    }
-
-
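A worked example of this transform's contract, assuming the function above were still in place (last name deliberately missing to show the warning path):

    row = {'user_email': 'PAT@Example.org', 'first_name': 'Pat'}
    result = transform_csv_row_to_user_data(row, existing_emails=set())

    assert result['user_data'] == {'email': 'pat@example.org', 'first_name': 'Pat'}
    assert result['errors'] == []                     # email present and not a duplicate
    assert 'Missing last name' in result['warnings']  # soft problem; row still importable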
-# ============================================================================
-# Members CSV Parser (Subscription Data)
-# ============================================================================
-
-def parse_members_csv(file_path: str) -> Dict[str, Any]:
-    """
-    Parse WordPress PMS Members export CSV for subscription data.
-
-    Args:
-        file_path: Path to pms-export-members CSV file
-
-    Returns:
-        Dictionary mapping user_email to subscription data
-    """
-    members_data = {}
-
-    try:
-        df = pd.read_csv(file_path)
-
-        for _, row in df.iterrows():
-            email = str(row.get('user_email', '')).strip().lower()
-            if not email or email == 'nan':
-                continue
-
-            # Parse subscription dates
-            start_date = parse_date_various(row.get('start_date'))
-            expiration_date = parse_date_various(row.get('expiration_date'))
-
-            # Map subscription status
-            wp_status = str(row.get('status', '')).lower().strip()
-            if wp_status == 'active':
-                sub_status = 'active'
-            elif wp_status in ('expired', 'abandoned'):
-                sub_status = 'expired'
-            elif wp_status in ('canceled', 'cancelled'):
-                sub_status = 'cancelled'
-            else:
-                sub_status = 'active'  # Default
-
-            # Parse payment gateway
-            payment_gateway = str(row.get('payment_gateway', '')).lower().strip()
-            if 'stripe' in payment_gateway:
-                payment_method = 'stripe'
-            elif 'paypal' in payment_gateway:
-                payment_method = 'paypal'
-            elif payment_gateway in ('manual', 'admin', ''):
-                payment_method = 'manual'
-            else:
-                payment_method = payment_gateway or 'manual'
-
-            members_data[email] = {
-                'subscription_plan_id': row.get('subscription_plan_id'),
-                'subscription_plan_name': row.get('subscription_plan_name'),
-                'start_date': start_date,
-                'end_date': expiration_date,
-                'status': sub_status,
-                'payment_method': payment_method,
-                'wordpress_user_id': row.get('user_id'),
-                'billing_first_name': row.get('billing_first_name'),
-                'billing_last_name': row.get('billing_last_name'),
-                'billing_address': row.get('billing_address'),
-                'billing_city': row.get('billing_city'),
-                'billing_state': row.get('billing_state'),
-                'billing_zip': row.get('billing_zip'),
-                'card_last4': row.get('billing_card_last4'),
-            }
-
-    except Exception as e:
-        logger.error(f"Error parsing members CSV: {str(e)}")
-        raise
-
-    return members_data
-
-
-# ============================================================================
-# Payments CSV Parser (Payment History)
-# ============================================================================
-
-def parse_payments_csv(file_path: str) -> Dict[str, List[Dict]]:
-    """
-    Parse WordPress PMS Payments export CSV for payment history.
-
-    Args:
-        file_path: Path to pms-export-payments CSV file
-
-    Returns:
-        Dictionary mapping user_email to list of payment records
-    """
-    payments_data = {}
-
-    try:
-        df = pd.read_csv(file_path)
-
-        for _, row in df.iterrows():
-            email = str(row.get('user_email', '')).strip().lower()
-            if not email or email == 'nan':
-                continue
-
-            # Parse payment date
-            payment_date = parse_date_various(row.get('date'))
-
-            # Parse amount (convert to cents)
-            amount_str = str(row.get('amount', '0')).replace('$', '').replace(',', '').strip()
-            try:
-                amount_cents = int(float(amount_str) * 100)
-            except (ValueError, TypeError):
-                amount_cents = 0
-
-            # Map payment status
-            wp_status = str(row.get('status', '')).lower().strip()
-            if wp_status == 'completed':
-                payment_status = 'completed'
-            elif wp_status in ('pending', 'processing'):
-                payment_status = 'pending'
-            elif wp_status in ('failed', 'refunded'):
-                payment_status = 'failed'
-            else:
-                payment_status = 'completed'  # Default for historical data
-
-            payment_record = {
-                'payment_id': row.get('payment_id'),
-                'amount_cents': amount_cents,
-                'status': payment_status,
-                'date': payment_date,
-                'payment_gateway': row.get('payment_gateway'),
-                'transaction_id': row.get('transaction_id'),
-                'profile_id': row.get('profile_id'),
-                'subscription_plan_id': row.get('subscription_plan_id'),
-                'wordpress_user_id': row.get('user_id'),
-            }
-
-            if email not in payments_data:
-                payments_data[email] = []
-            payments_data[email].append(payment_record)
-
-    except Exception as e:
-        logger.error(f"Error parsing payments CSV: {str(e)}")
-        raise
-
-    return payments_data
-
-
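One review note on the amount handling in parse_payments_csv: int(float(s) * 100) truncates toward zero, and some decimal strings drift under binary floats, so a value like "0.29" silently loses a cent. round() (or decimal.Decimal) is the safer normalization:

    assert int(float('0.29') * 100) == 28    # 0.29 * 100 -> 28.999999999999996
    assert round(float('0.29') * 100) == 29  # presumably what the importer intends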
-# ============================================================================
-# Comprehensive Import Analysis
-# ============================================================================
-
-def analyze_comprehensive_import(
-    users_csv_path: str,
-    members_csv_path: Optional[str] = None,
-    payments_csv_path: Optional[str] = None,
-    existing_emails: Optional[set] = None
-) -> Dict[str, Any]:
-    """
-    Analyze all CSV files for comprehensive import with cross-referencing.
-
-    Args:
-        users_csv_path: Path to WordPress users export CSV (required)
-        members_csv_path: Path to PMS members CSV (optional)
-        payments_csv_path: Path to PMS payments CSV (optional)
-        existing_emails: Set of emails already in database
-
-    Returns:
-        Comprehensive analysis with preview data for all files
-    """
-    if existing_emails is None:
-        existing_emails = set()
-
-    result = {
-        'users': {'total': 0, 'valid': 0, 'warnings': 0, 'errors': 0, 'preview': []},
-        'members': {'total': 0, 'matched': 0, 'unmatched': 0, 'data': {}},
-        'payments': {'total': 0, 'matched': 0, 'total_amount_cents': 0, 'data': {}},
-        'summary': {
-            'total_users': 0,
-            'importable_users': 0,
-            'duplicate_emails': 0,
-            'users_with_subscriptions': 0,
-            'users_with_payments': 0,
-            'total_payment_amount': 0,
-        }
-    }
-
-    # Parse members CSV if provided
-    members_data = {}
-    if members_csv_path:
-        try:
-            members_data = parse_members_csv(members_csv_path)
-            result['members']['total'] = len(members_data)
-            result['members']['data'] = members_data
-        except Exception as e:
-            result['members']['error'] = str(e)
-
-    # Parse payments CSV if provided
-    payments_data = {}
-    if payments_csv_path:
-        try:
-            payments_data = parse_payments_csv(payments_csv_path)
-            result['payments']['total'] = sum(len(p) for p in payments_data.values())
-            result['payments']['data'] = payments_data
-            result['payments']['total_amount_cents'] = sum(
-                sum(p['amount_cents'] for p in payments)
-                for payments in payments_data.values()
-            )
-        except Exception as e:
-            result['payments']['error'] = str(e)
-
-    # Parse users CSV
-    try:
-        df = pd.read_csv(users_csv_path)
-        result['users']['total'] = len(df)
-
-        seen_emails = set()
-        total_warnings = 0
-        total_errors = 0
-
-        for idx, row in df.iterrows():
-            row_dict = row.to_dict()
-            transformed = transform_csv_row_to_user_data(row_dict, existing_emails)
-
-            email = transformed['user_data'].get('email', '').lower()
-
-            # Check for CSV duplicates
-            if email in seen_emails:
-                transformed['errors'].append(f'Duplicate email in CSV')
-            elif email:
-                seen_emails.add(email)
-
-            # Cross-reference with members data
-            subscription_data = members_data.get(email)
-            if subscription_data:
-                result['members']['matched'] += 1
-
-            # Cross-reference with payments data
-            payment_records = payments_data.get(email, [])
-            if payment_records:
-                result['payments']['matched'] += 1
-
-            # Parse WordPress roles for role/status suggestion
-            wp_capabilities = row.get('wp_capabilities', '')
-            wp_roles = parse_php_serialized(wp_capabilities)
-            loaf_role, role_status = map_wordpress_role(wp_roles)
-
-            # Determine status
-            approval_status = str(row.get('wppb_approval_status', '')).strip()
-            has_subscription = 'pms_subscription_plan_63' in wp_roles or subscription_data is not None
-
-            if role_status:
-                suggested_status = role_status
-            else:
-                suggested_status = suggest_status(approval_status, has_subscription, loaf_role)
-
-            # Build preview row
-            preview_row = {
-                'row_number': idx + 1,
-                'email': email,
-                'first_name': transformed['user_data'].get('first_name', ''),
-                'last_name': transformed['user_data'].get('last_name', ''),
-                'phone': transformed['user_data'].get('phone', ''),
-                'date_of_birth': transformed['user_data'].get('date_of_birth').isoformat() if transformed['user_data'].get('date_of_birth') else None,
-                'wordpress_user_id': transformed['user_data'].get('wordpress_user_id'),
-                'wordpress_roles': wp_roles,
-                'suggested_role': loaf_role,
-                'suggested_status': suggested_status,
-                'has_subscription': has_subscription,
-                'subscription_data': subscription_data,
-                'payment_count': len(payment_records),
-                'total_paid_cents': sum(p['amount_cents'] for p in payment_records),
-                'user_data': transformed['user_data'],
-                'custom_data': transformed['custom_data'],
-                'newsletter_prefs': transformed['newsletter_prefs'],
-                'warnings': transformed['warnings'],
-                'errors': transformed['errors'],
-            }
-
-            result['users']['preview'].append(preview_row)
-            total_warnings += len(transformed['warnings'])
-            total_errors += len(transformed['errors'])
-
-            if not transformed['errors']:
-                result['users']['valid'] += 1
-
-        result['users']['warnings'] = total_warnings
-        result['users']['errors'] = total_errors
-
-        # Calculate unmatched members
-        user_emails = {p['email'] for p in result['users']['preview'] if p['email']}
-        result['members']['unmatched'] = len(set(members_data.keys()) - user_emails)
-
-        # Summary stats
-        result['summary']['total_users'] = result['users']['total']
-        result['summary']['importable_users'] = result['users']['valid']
-        result['summary']['duplicate_emails'] = len(seen_emails & existing_emails)
-        result['summary']['users_with_subscriptions'] = result['members']['matched']
-        result['summary']['users_with_payments'] = result['payments']['matched']
-        result['summary']['total_payment_amount'] = result['payments']['total_amount_cents']
-
-    except Exception as e:
-        logger.error(f"Error analyzing users CSV: {str(e)}")
-        result['users']['error'] = str(e)
-        raise
-
-    return result
-
-
 # ============================================================================
 # CSV Analysis and Preview Generation
 # ============================================================================
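The whole removed pipeline was driven from that one entry point; a sketch of how it was presumably invoked (paths and the email set are illustrative):

    analysis = analyze_comprehensive_import(
        users_csv_path='exports/users.csv',
        members_csv_path='exports/pms-export-members.csv',    # optional
        payments_csv_path='exports/pms-export-payments.csv',  # optional
        existing_emails={'already@example.org'},
    )
    print(analysis['summary']['importable_users'])
    print(analysis['summary']['total_payment_amount'])  # integer cents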
@@ -1066,6 +344,8 @@ def analyze_csv(file_path: str, existing_emails: Optional[set] = None) -> Dict:
         }
     }
     """
+    import pandas as pd
+
     # Read CSV with pandas
     df = pd.read_csv(file_path)
 
@@ -1241,4 +521,11 @@ def format_preview_for_display(preview_data: List[Dict], page: int = 1, page_siz
 # Module Initialization
 # ============================================================================
 
+# Import pandas for CSV processing
+try:
+    import pandas as pd
+except ImportError:
+    logger.error("pandas library not found. Please install: pip install pandas")
+    raise
+
 logger.info("WordPress parser module loaded successfully")