21 Commits

Author SHA1 Message Date
37b1ab75df Merge pull request 'Merge from Dev to LOAF Production' (#23) from dev into loaf-prod
Reviewed-on: #23
2026-01-07 08:43:14 +00:00
f915976cb3 Merge pull request 'feat: Implement Option 3 - Proper RBAC with role-based staff invitations' (#22) from dev into loaf-prod
Reviewed-on: #22
2026-01-06 08:35:09 +00:00
9c5aafc57b Merge pull request 'Add missing endpoints, fix batch updates, and implement RSVP status' (#21) from dev into loaf-prod
Reviewed-on: #21
2026-01-05 18:08:21 +00:00
3755a71ed8 Merge pull request 'Alembic migration for synchronize Database' (#20) from dev into loaf-prod
Reviewed-on: #20
2026-01-05 14:16:21 +00:00
b2293a5588 Merge pull request 'Alembic fix for PROD' (#19) from dev into loaf-prod
Reviewed-on: #19
2026-01-05 10:31:38 +00:00
9f29bf05d8 Merge pull request 'Database Migration fix' (#18) from dev into loaf-prod
Reviewed-on: #18
2026-01-05 10:26:08 +00:00
b44d55919e Merge pull request 'Alembic Database fix' (#17) from dev into loaf-prod
Reviewed-on: #17
2026-01-05 10:16:04 +00:00
1a6341a94c Merge pull request 'Alembic Database Syncronization' (#16) from dev into loaf-prod
Reviewed-on: #16
2026-01-05 10:09:27 +00:00
727cbf4b5c Merge pull request 'Merge from dev' (#15) from dev into loaf-prod
Reviewed-on: #15
2026-01-05 08:49:16 +00:00
9c3f3c88b8 Merge pull request 'Add comprehensive column check and migration 009' (#14) from dev into loaf-prod
Reviewed-on: #14
2026-01-04 16:19:51 +00:00
849a6a32af Merge pull request 'Add missing donations table columns' (#13) from dev into loaf-prod
Reviewed-on: #13
2026-01-04 16:10:27 +00:00
69b8185414 Merge pull request 'Fix migration 007 - skip existing columns' (#12) from dev into loaf-prod
Reviewed-on: #12
2026-01-04 16:06:27 +00:00
f5f8ca8dc6 Merge pull request 'Add missing subscription_plans columns' (#11) from dev into loaf-prod
Reviewed-on: #11
2026-01-04 16:03:43 +00:00
661a4cbb7c Merge pull request 'Fix subscription_plans.is_active column name' (#10) from dev into loaf-prod
Reviewed-on: #10
2026-01-04 15:58:05 +00:00
a01a8b9915 Merge pull request 'Superadmin nullable fix' (#9) from dev into loaf-prod
Reviewed-on: #9
2026-01-04 15:35:59 +00:00
e126cb988c Merge pull request 'Subscription and Storage data mismatch' (#8) from dev into loaf-prod
Reviewed-on: #8
2026-01-04 15:28:46 +00:00
fd988241a1 Merge pull request 'Subscription and Storage data mismatch' (#7) from dev into loaf-prod
Reviewed-on: #7
2026-01-04 15:24:11 +00:00
c28eddca67 Merge pull request 'Fix database mismatches' (#6) from dev into loaf-prod
Reviewed-on: #6
2026-01-04 15:17:18 +00:00
e20542ccdc Merge pull request 'Fix database mismatches' (#5) from dev into loaf-prod
Reviewed-on: #5
2026-01-04 15:02:09 +00:00
b3f1f5f789 Merge pull request 'Prod Deployment Preparation' (#4) from dev into loaf-prod
Reviewed-on: #4
2026-01-04 12:10:12 +00:00
1da045f73f Merge pull request 'Update Gitignore' (#3) from dev into loaf-prod
Reviewed-on: #3
2026-01-02 08:45:29 +00:00
31 changed files with 327 additions and 8201 deletions

View File

@@ -1,83 +0,0 @@
# Git
.git
.gitignore
# Python
__pycache__
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
# Virtual environments
venv/
ENV/
env/
.venv/
# IDE
.idea/
.vscode/
*.swp
*.swo
*~
# Testing
.pytest_cache/
.coverage
htmlcov/
.tox/
.nox/
# Environment files (will be mounted or passed via env vars)
.env
.env.local
.env.*.local
*.env
# Logs
*.log
logs/
# Database
*.db
*.sqlite3
# Alembic
alembic/versions/__pycache__/
# Docker
Dockerfile
docker-compose*.yml
.docker/
# Documentation
*.md
docs/
# Temporary files
tmp/
temp/
*.tmp
# OS files
.DS_Store
Thumbs.db
# Uploads (will be mounted as volume)
uploads/

View File

@@ -6,10 +6,6 @@ JWT_SECRET=your-secret-key-change-this-in-production
JWT_ALGORITHM=HS256
ACCESS_TOKEN_EXPIRE_MINUTES=30
# Settings Encryption (for database-stored sensitive settings)
# Generate with: python -c "import secrets; print(secrets.token_urlsafe(64))"
SETTINGS_ENCRYPTION_KEY=your-encryption-key-generate-with-command-above
# SMTP Email Configuration (Port 465 - SSL/TLS)
SMTP_HOST=p.konceptkit.com
SMTP_PORT=465
@@ -32,14 +28,7 @@ SMTP_FROM_NAME=LOAF Membership
# Frontend URL
FRONTEND_URL=http://localhost:3000
# Backend URL (for webhook URLs and API references)
# Used to construct Stripe webhook URL shown in Admin Settings
BACKEND_URL=http://localhost:8000
# Stripe Configuration (NOW DATABASE-DRIVEN via Admin Settings page)
# Configure Stripe credentials through the Admin Settings UI (requires SETTINGS_ENCRYPTION_KEY)
# No longer requires .env variables - managed through database for dynamic updates
# Legacy .env variables below are deprecated:
# Stripe Configuration (for future payment integration)
# STRIPE_SECRET_KEY=sk_test_...
# STRIPE_WEBHOOK_SECRET=whsec_...

3
.gitignore vendored
View File

@@ -245,9 +245,6 @@ temp_uploads/
tmp/
temporary/
# Generated SQL files (from scripts)
create_superadmin.sql
# CSV imports
imports/*.csv
!imports/.gitkeep

View File

@@ -1,40 +0,0 @@
# Backend Dockerfile - FastAPI with Python
FROM python:3.11-slim
# Set environment variables
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV PYTHONPATH=/app
# Set work directory
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
gcc \
libpq-dev \
curl \
&& rm -rf /var/lib/apt/lists/*
# Install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir -r requirements.txt
# Copy application code
COPY . .
# Create non-root user for security
RUN adduser --disabled-password --gecos '' appuser && \
chown -R appuser:appuser /app
USER appuser
# Expose port
EXPOSE 8000
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:8000/health || exit 1
# Run the application
CMD ["uvicorn", "server:app", "--host", "0.0.0.0", "--port", "8000"]

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -1,141 +0,0 @@
#!/usr/bin/env python3
"""
Add Directory Permissions Script
This script adds the new directory.view and directory.manage permissions
without clearing existing permissions.
Usage:
python add_directory_permissions.py
"""
import os
import sys
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from database import Base
from models import Permission, RolePermission, Role, UserRole
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
# Database connection
DATABASE_URL = os.getenv("DATABASE_URL")
if not DATABASE_URL:
print("Error: DATABASE_URL environment variable not set")
sys.exit(1)
engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# New directory permissions
NEW_PERMISSIONS = [
{"code": "directory.view", "name": "View Directory Settings", "description": "View member directory field configuration", "module": "directory"},
{"code": "directory.manage", "name": "Manage Directory Fields", "description": "Enable/disable directory fields shown in Profile and Directory pages", "module": "directory"},
]
# Roles that should have these permissions
ROLE_PERMISSION_MAP = {
"directory.view": ["admin", "superadmin"],
"directory.manage": ["admin", "superadmin"],
}
def add_directory_permissions():
"""Add directory permissions and assign to appropriate roles"""
db = SessionLocal()
try:
print("=" * 60)
print("Adding Directory Permissions")
print("=" * 60)
# Step 1: Add permissions if they don't exist
print("\n1. Adding permissions...")
permission_map = {}
for perm_data in NEW_PERMISSIONS:
existing = db.query(Permission).filter(Permission.code == perm_data["code"]).first()
if existing:
print(f" - {perm_data['code']}: Already exists")
permission_map[perm_data["code"]] = existing
else:
permission = Permission(
code=perm_data["code"],
name=perm_data["name"],
description=perm_data["description"],
module=perm_data["module"]
)
db.add(permission)
db.flush() # Get the ID
permission_map[perm_data["code"]] = permission
print(f" - {perm_data['code']}: Created")
db.commit()
# Step 2: Get roles
print("\n2. Fetching roles...")
roles = db.query(Role).all()
role_map = {role.code: role for role in roles}
print(f" Found {len(roles)} roles: {', '.join(role_map.keys())}")
# Enum mapping for backward compatibility
role_enum_map = {
'guest': UserRole.guest,
'member': UserRole.member,
'admin': UserRole.admin,
'superadmin': UserRole.superadmin,
'finance': UserRole.finance
}
# Step 3: Assign permissions to roles
print("\n3. Assigning permissions to roles...")
for perm_code, role_codes in ROLE_PERMISSION_MAP.items():
permission = permission_map.get(perm_code)
if not permission:
print(f" Warning: Permission {perm_code} not found")
continue
for role_code in role_codes:
role = role_map.get(role_code)
if not role:
print(f" Warning: Role {role_code} not found")
continue
# Check if mapping already exists
existing_mapping = db.query(RolePermission).filter(
RolePermission.role_id == role.id,
RolePermission.permission_id == permission.id
).first()
if existing_mapping:
print(f" - {role_code} -> {perm_code}: Already assigned")
else:
role_enum = role_enum_map.get(role_code, UserRole.guest)
mapping = RolePermission(
role=role_enum,
role_id=role.id,
permission_id=permission.id
)
db.add(mapping)
print(f" - {role_code} -> {perm_code}: Assigned")
db.commit()
print("\n" + "=" * 60)
print("Directory permissions added successfully!")
print("=" * 60)
except Exception as e:
db.rollback()
print(f"\nError: {str(e)}")
import traceback
traceback.print_exc()
raise
finally:
db.close()
if __name__ == "__main__":
add_directory_permissions()

View File

@@ -1,141 +0,0 @@
#!/usr/bin/env python3
"""
Add Registration Permissions Script
This script adds the new registration.view and registration.manage permissions
without clearing existing permissions.
Usage:
python add_registration_permissions.py
"""
import os
import sys
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from database import Base
from models import Permission, RolePermission, Role, UserRole
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
# Database connection
DATABASE_URL = os.getenv("DATABASE_URL")
if not DATABASE_URL:
print("Error: DATABASE_URL environment variable not set")
sys.exit(1)
engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# New registration permissions
NEW_PERMISSIONS = [
{"code": "registration.view", "name": "View Registration Settings", "description": "View registration form schema and settings", "module": "registration"},
{"code": "registration.manage", "name": "Manage Registration Form", "description": "Edit registration form schema, steps, and fields", "module": "registration"},
]
# Roles that should have these permissions
ROLE_PERMISSION_MAP = {
"registration.view": ["admin", "superadmin"],
"registration.manage": ["admin", "superadmin"],
}
def add_registration_permissions():
"""Add registration permissions and assign to appropriate roles"""
db = SessionLocal()
try:
print("=" * 60)
print("Adding Registration Permissions")
print("=" * 60)
# Step 1: Add permissions if they don't exist
print("\n1. Adding permissions...")
permission_map = {}
for perm_data in NEW_PERMISSIONS:
existing = db.query(Permission).filter(Permission.code == perm_data["code"]).first()
if existing:
print(f" - {perm_data['code']}: Already exists")
permission_map[perm_data["code"]] = existing
else:
permission = Permission(
code=perm_data["code"],
name=perm_data["name"],
description=perm_data["description"],
module=perm_data["module"]
)
db.add(permission)
db.flush() # Get the ID
permission_map[perm_data["code"]] = permission
print(f" - {perm_data['code']}: Created")
db.commit()
# Step 2: Get roles
print("\n2. Fetching roles...")
roles = db.query(Role).all()
role_map = {role.code: role for role in roles}
print(f" Found {len(roles)} roles: {', '.join(role_map.keys())}")
# Enum mapping for backward compatibility
role_enum_map = {
'guest': UserRole.guest,
'member': UserRole.member,
'admin': UserRole.admin,
'superadmin': UserRole.superadmin,
'finance': UserRole.finance
}
# Step 3: Assign permissions to roles
print("\n3. Assigning permissions to roles...")
for perm_code, role_codes in ROLE_PERMISSION_MAP.items():
permission = permission_map.get(perm_code)
if not permission:
print(f" Warning: Permission {perm_code} not found")
continue
for role_code in role_codes:
role = role_map.get(role_code)
if not role:
print(f" Warning: Role {role_code} not found")
continue
# Check if mapping already exists
existing_mapping = db.query(RolePermission).filter(
RolePermission.role_id == role.id,
RolePermission.permission_id == permission.id
).first()
if existing_mapping:
print(f" - {role_code} -> {perm_code}: Already assigned")
else:
role_enum = role_enum_map.get(role_code, UserRole.guest)
mapping = RolePermission(
role=role_enum,
role_id=role.id,
permission_id=permission.id
)
db.add(mapping)
print(f" - {role_code} -> {perm_code}: Assigned")
db.commit()
print("\n" + "=" * 60)
print("Registration permissions added successfully!")
print("=" * 60)
except Exception as e:
db.rollback()
print(f"\nError: {str(e)}")
import traceback
traceback.print_exc()
raise
finally:
db.close()
if __name__ == "__main__":
add_registration_permissions()

View File

@@ -1,39 +0,0 @@
"""add_custom_registration_data
Revision ID: 014_custom_registration
Revises: a1b2c3d4e5f6
Create Date: 2026-02-01 10:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '014_custom_registration'
down_revision: Union[str, None] = 'a1b2c3d4e5f6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Add custom_registration_data column to users table
# This stores dynamic registration field responses as JSON
op.add_column('users', sa.Column(
'custom_registration_data',
sa.JSON,
nullable=False,
server_default='{}'
))
# Add comment for documentation
op.execute("""
COMMENT ON COLUMN users.custom_registration_data IS
'Dynamic registration field responses stored as JSON for custom form fields';
""")
def downgrade() -> None:
op.drop_column('users', 'custom_registration_data')

View File

@@ -1,48 +0,0 @@
"""add_role_audit_fields
Revision ID: 4fa11836f7fd
Revises: 013_sync_permissions
Create Date: 2026-01-16 17:21:40.514605
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID
# revision identifiers, used by Alembic.
revision: str = '4fa11836f7fd'
down_revision: Union[str, None] = '013_sync_permissions'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Add role audit trail columns
op.add_column('users', sa.Column('role_changed_at', sa.DateTime(timezone=True), nullable=True))
op.add_column('users', sa.Column('role_changed_by', UUID(as_uuid=True), nullable=True))
# Create foreign key constraint to track who changed the role
op.create_foreign_key(
'fk_users_role_changed_by',
'users', 'users',
['role_changed_by'], ['id'],
ondelete='SET NULL'
)
# Create index for efficient querying by role change date
op.create_index('idx_users_role_changed_at', 'users', ['role_changed_at'])
def downgrade() -> None:
# Drop index first
op.drop_index('idx_users_role_changed_at')
# Drop foreign key constraint
op.drop_constraint('fk_users_role_changed_by', 'users', type_='foreignkey')
# Drop columns
op.drop_column('users', 'role_changed_by')
op.drop_column('users', 'role_changed_at')

View File

@@ -1,76 +0,0 @@
"""add_stripe_transaction_metadata
Revision ID: 956ea1628264
Revises: ec4cb4a49cde
Create Date: 2026-01-20 22:00:01.806931
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '956ea1628264'
down_revision: Union[str, None] = 'ec4cb4a49cde'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Add Stripe transaction metadata to subscriptions table
op.add_column('subscriptions', sa.Column('stripe_payment_intent_id', sa.String(), nullable=True))
op.add_column('subscriptions', sa.Column('stripe_charge_id', sa.String(), nullable=True))
op.add_column('subscriptions', sa.Column('stripe_invoice_id', sa.String(), nullable=True))
op.add_column('subscriptions', sa.Column('payment_completed_at', sa.DateTime(timezone=True), nullable=True))
op.add_column('subscriptions', sa.Column('card_last4', sa.String(4), nullable=True))
op.add_column('subscriptions', sa.Column('card_brand', sa.String(20), nullable=True))
op.add_column('subscriptions', sa.Column('stripe_receipt_url', sa.String(), nullable=True))
# Add indexes for Stripe transaction IDs in subscriptions
op.create_index('idx_subscriptions_payment_intent', 'subscriptions', ['stripe_payment_intent_id'])
op.create_index('idx_subscriptions_charge_id', 'subscriptions', ['stripe_charge_id'])
op.create_index('idx_subscriptions_invoice_id', 'subscriptions', ['stripe_invoice_id'])
# Add Stripe transaction metadata to donations table
op.add_column('donations', sa.Column('stripe_charge_id', sa.String(), nullable=True))
op.add_column('donations', sa.Column('stripe_customer_id', sa.String(), nullable=True))
op.add_column('donations', sa.Column('payment_completed_at', sa.DateTime(timezone=True), nullable=True))
op.add_column('donations', sa.Column('card_last4', sa.String(4), nullable=True))
op.add_column('donations', sa.Column('card_brand', sa.String(20), nullable=True))
op.add_column('donations', sa.Column('stripe_receipt_url', sa.String(), nullable=True))
# Add indexes for Stripe transaction IDs in donations
op.create_index('idx_donations_payment_intent', 'donations', ['stripe_payment_intent_id'])
op.create_index('idx_donations_charge_id', 'donations', ['stripe_charge_id'])
op.create_index('idx_donations_customer_id', 'donations', ['stripe_customer_id'])
def downgrade() -> None:
# Remove indexes from donations
op.drop_index('idx_donations_customer_id', table_name='donations')
op.drop_index('idx_donations_charge_id', table_name='donations')
op.drop_index('idx_donations_payment_intent', table_name='donations')
# Remove columns from donations
op.drop_column('donations', 'stripe_receipt_url')
op.drop_column('donations', 'card_brand')
op.drop_column('donations', 'card_last4')
op.drop_column('donations', 'payment_completed_at')
op.drop_column('donations', 'stripe_customer_id')
op.drop_column('donations', 'stripe_charge_id')
# Remove indexes from subscriptions
op.drop_index('idx_subscriptions_invoice_id', table_name='subscriptions')
op.drop_index('idx_subscriptions_charge_id', table_name='subscriptions')
op.drop_index('idx_subscriptions_payment_intent', table_name='subscriptions')
# Remove columns from subscriptions
op.drop_column('subscriptions', 'stripe_receipt_url')
op.drop_column('subscriptions', 'card_brand')
op.drop_column('subscriptions', 'card_last4')
op.drop_column('subscriptions', 'payment_completed_at')
op.drop_column('subscriptions', 'stripe_invoice_id')
op.drop_column('subscriptions', 'stripe_charge_id')
op.drop_column('subscriptions', 'stripe_payment_intent_id')

View File

@@ -1,100 +0,0 @@
"""add_payment_methods
Revision ID: a1b2c3d4e5f6
Revises: 956ea1628264
Create Date: 2026-01-30 10:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = 'a1b2c3d4e5f6'
down_revision: Union[str, None] = '956ea1628264'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
conn = op.get_bind()
# Create PaymentMethodType enum
paymentmethodtype = postgresql.ENUM(
'card', 'cash', 'bank_transfer', 'check',
name='paymentmethodtype',
create_type=False
)
paymentmethodtype.create(conn, checkfirst=True)
# Check if stripe_customer_id column exists on users table
result = conn.execute(sa.text("""
SELECT column_name FROM information_schema.columns
WHERE table_name = 'users' AND column_name = 'stripe_customer_id'
"""))
if result.fetchone() is None:
# Add stripe_customer_id to users table
op.add_column('users', sa.Column(
'stripe_customer_id',
sa.String(),
nullable=True,
comment='Stripe Customer ID for payment method management'
))
op.create_index('ix_users_stripe_customer_id', 'users', ['stripe_customer_id'])
# Check if payment_methods table exists
result = conn.execute(sa.text("""
SELECT table_name FROM information_schema.tables
WHERE table_name = 'payment_methods'
"""))
if result.fetchone() is None:
# Create payment_methods table
op.create_table(
'payment_methods',
sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
sa.Column('stripe_payment_method_id', sa.String(), nullable=True, unique=True, comment='Stripe pm_xxx reference'),
sa.Column('card_brand', sa.String(20), nullable=True, comment='Card brand: visa, mastercard, amex, etc.'),
sa.Column('card_last4', sa.String(4), nullable=True, comment='Last 4 digits of card'),
sa.Column('card_exp_month', sa.Integer(), nullable=True, comment='Card expiration month'),
sa.Column('card_exp_year', sa.Integer(), nullable=True, comment='Card expiration year'),
sa.Column('card_funding', sa.String(20), nullable=True, comment='Card funding type: credit, debit, prepaid'),
sa.Column('payment_type', paymentmethodtype, nullable=False, server_default='card'),
sa.Column('is_default', sa.Boolean(), nullable=False, server_default='false', comment='Whether this is the default payment method for auto-renewals'),
sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true', comment='Soft delete flag - False means removed'),
sa.Column('is_manual', sa.Boolean(), nullable=False, server_default='false', comment='True for manually recorded methods (cash/check)'),
sa.Column('manual_notes', sa.Text(), nullable=True, comment='Admin notes for manual payment methods'),
sa.Column('created_by', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='SET NULL'), nullable=True, comment='Admin who added this on behalf of user'),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now(), onupdate=sa.func.now()),
)
# Create indexes
op.create_index('ix_payment_methods_user_id', 'payment_methods', ['user_id'])
op.create_index('ix_payment_methods_stripe_pm_id', 'payment_methods', ['stripe_payment_method_id'])
op.create_index('idx_payment_method_user_default', 'payment_methods', ['user_id', 'is_default'])
op.create_index('idx_payment_method_active', 'payment_methods', ['user_id', 'is_active'])
def downgrade() -> None:
# Drop indexes
op.drop_index('idx_payment_method_active', table_name='payment_methods')
op.drop_index('idx_payment_method_user_default', table_name='payment_methods')
op.drop_index('ix_payment_methods_stripe_pm_id', table_name='payment_methods')
op.drop_index('ix_payment_methods_user_id', table_name='payment_methods')
# Drop payment_methods table
op.drop_table('payment_methods')
# Drop stripe_customer_id from users
op.drop_index('ix_users_stripe_customer_id', table_name='users')
op.drop_column('users', 'stripe_customer_id')
# Drop PaymentMethodType enum
paymentmethodtype = postgresql.ENUM(
'card', 'cash', 'bank_transfer', 'check',
name='paymentmethodtype'
)
paymentmethodtype.drop(op.get_bind(), checkfirst=True)

View File

@@ -1,68 +0,0 @@
"""add_system_settings_table
Revision ID: ec4cb4a49cde
Revises: 4fa11836f7fd
Create Date: 2026-01-16 18:16:00.283455
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID
# revision identifiers, used by Alembic.
revision: str = 'ec4cb4a49cde'
down_revision: Union[str, None] = '4fa11836f7fd'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Create enum for setting types (only if not exists)
op.execute("""
DO $$ BEGIN
CREATE TYPE settingtype AS ENUM ('plaintext', 'encrypted', 'json');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
""")
# Create system_settings table
op.execute("""
CREATE TABLE system_settings (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
setting_key VARCHAR(100) UNIQUE NOT NULL,
setting_value TEXT,
setting_type settingtype NOT NULL DEFAULT 'plaintext'::settingtype,
description TEXT,
updated_by UUID REFERENCES users(id) ON DELETE SET NULL,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
is_sensitive BOOLEAN NOT NULL DEFAULT FALSE
);
COMMENT ON COLUMN system_settings.setting_key IS 'Unique setting identifier (e.g., stripe_secret_key)';
COMMENT ON COLUMN system_settings.setting_value IS 'Setting value (encrypted if setting_type is encrypted)';
COMMENT ON COLUMN system_settings.setting_type IS 'Type of setting: plaintext, encrypted, or json';
COMMENT ON COLUMN system_settings.description IS 'Human-readable description of the setting';
COMMENT ON COLUMN system_settings.updated_by IS 'User who last updated this setting';
COMMENT ON COLUMN system_settings.is_sensitive IS 'Whether this setting contains sensitive data';
""")
# Create indexes
op.create_index('idx_system_settings_key', 'system_settings', ['setting_key'])
op.create_index('idx_system_settings_updated_at', 'system_settings', ['updated_at'])
def downgrade() -> None:
# Drop indexes
op.drop_index('idx_system_settings_updated_at')
op.drop_index('idx_system_settings_key')
# Drop table
op.drop_table('system_settings')
# Drop enum
op.execute('DROP TYPE IF EXISTS settingtype')

View File

@@ -128,7 +128,7 @@ async def get_current_admin_user(current_user: User = Depends(get_current_user))
return current_user
async def get_active_member(current_user: User = Depends(get_current_user)) -> User:
"""Require user to be active member or staff with valid status"""
"""Require user to be active member with valid payment"""
from models import UserStatus
if current_user.status != UserStatus.active:
@@ -138,7 +138,7 @@ async def get_active_member(current_user: User = Depends(get_current_user)) -> U
)
role_code = get_user_role_code(current_user)
if role_code not in ["member", "admin", "superadmin", "finance"]:
if role_code not in ["member", "admin", "superadmin"]:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Member access only"

View File

@@ -1,15 +1,38 @@
#!/usr/bin/env python3
"""
Create Superadmin User Script
Directly creates a superadmin user in the database for LOAF membership platform
Generates a superadmin user with hashed password for LOAF membership platform
"""
import bcrypt
import sys
import os
from getpass import getpass
# Add the backend directory to path for imports
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
def generate_password_hash(password: str) -> str:
"""Generate bcrypt hash for password"""
return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode()
def generate_sql(email: str, password_hash: str, first_name: str, last_name: str) -> str:
"""Generate SQL INSERT statement"""
return f"""
-- Create Superadmin User
INSERT INTO users (
id, email, password_hash, first_name, last_name,
status, role, email_verified, created_at, updated_at
) VALUES (
gen_random_uuid(),
'{email}',
'{password_hash}',
'{first_name}',
'{last_name}',
'active',
'superadmin',
true,
NOW(),
NOW()
);
"""
def main():
print("=" * 70)
@@ -17,15 +40,6 @@ def main():
print("=" * 70)
print()
# Check for DATABASE_URL
from dotenv import load_dotenv
load_dotenv()
database_url = os.getenv("DATABASE_URL")
if not database_url:
print("❌ DATABASE_URL not found in environment or .env file")
sys.exit(1)
# Get user input
email = input("Email address: ").strip()
if not email or '@' not in email:
@@ -54,89 +68,31 @@ def main():
sys.exit(1)
print()
print("Creating superadmin user...")
try:
# Import database dependencies
from sqlalchemy import create_engine, text
from passlib.context import CryptContext
# Create password hash
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
password_hash = pwd_context.hash(password)
# Connect to database
engine = create_engine(database_url)
with engine.connect() as conn:
# Check if user already exists
result = conn.execute(
text("SELECT id FROM users WHERE email = :email"),
{"email": email}
)
if result.fetchone():
print(f"❌ User with email '{email}' already exists")
sys.exit(1)
# Insert superadmin user
conn.execute(
text("""
INSERT INTO users (
id, email, password_hash, first_name, last_name,
phone, address, city, state, zipcode, date_of_birth,
status, role, email_verified,
newsletter_subscribed, accepts_tos,
created_at, updated_at
) VALUES (
gen_random_uuid(),
:email,
:password_hash,
:first_name,
:last_name,
'',
'',
'',
'',
'',
'1990-01-01',
'active',
'superadmin',
true,
false,
true,
NOW(),
NOW()
)
"""),
{
"email": email,
"password_hash": password_hash,
"first_name": first_name,
"last_name": last_name
}
)
conn.commit()
print("Generating password hash...")
password_hash = generate_password_hash(password)
print("✅ Password hash generated")
print()
print("=" * 70)
print("✅ Superadmin user created successfully!")
print("=" * 70)
print()
print(f" Email: {email}")
print(f" Name: {first_name} {last_name}")
print(f" Role: superadmin")
print(f" Status: active")
print()
print("You can now log in with these credentials.")
print("SQL STATEMENT")
print("=" * 70)
except ImportError as e:
print(f"❌ Missing dependency: {e}")
print(" Run: pip install sqlalchemy psycopg2-binary passlib python-dotenv")
sys.exit(1)
except Exception as e:
print(f"❌ Database error: {e}")
sys.exit(1)
sql = generate_sql(email, password_hash, first_name, last_name)
print(sql)
# Save to file
output_file = "create_superadmin.sql"
with open(output_file, 'w') as f:
f.write(sql)
print("=" * 70)
print(f"✅ SQL saved to: {output_file}")
print()
print("Run this command to create the user:")
print(f" psql -U postgres -d loaf_new -f {output_file}")
print()
print("Or copy the SQL above and run it directly in psql")
print("=" * 70)
if __name__ == "__main__":
try:
@@ -144,3 +100,6 @@ if __name__ == "__main__":
except KeyboardInterrupt:
print("\n\n❌ Cancelled by user")
sys.exit(1)
except Exception as e:
print(f"\n❌ Error: {e}")
sys.exit(1)

View File

@@ -1,7 +1,6 @@
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import QueuePool
import os
from dotenv import load_dotenv
from pathlib import Path
@@ -11,21 +10,7 @@ load_dotenv(ROOT_DIR / '.env')
DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://user:password@localhost:5432/membership_db')
# Configure engine with connection pooling and connection health checks
engine = create_engine(
DATABASE_URL,
poolclass=QueuePool,
pool_size=5, # Keep 5 connections open
max_overflow=10, # Allow up to 10 extra connections during peak
pool_pre_ping=True, # CRITICAL: Test connections before using them
pool_recycle=3600, # Recycle connections every hour (prevents stale connections)
echo=False, # Set to True for SQL debugging
connect_args={
'connect_timeout': 10, # Timeout connection attempts after 10 seconds
'options': '-c statement_timeout=30000' # 30 second query timeout
}
)
engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()

View File

@@ -1,122 +0,0 @@
"""
Encryption service for sensitive settings stored in database.
Uses Fernet symmetric encryption (AES-128 in CBC mode with HMAC authentication).
The encryption key is derived from a master secret stored in .env.
"""
import os
import base64
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from cryptography.hazmat.backends import default_backend
class EncryptionService:
"""Service for encrypting and decrypting sensitive configuration values"""
def __init__(self):
# Get master encryption key from environment
# This should be a long, random string (e.g., 64 characters)
# Generate one with: python -c "import secrets; print(secrets.token_urlsafe(64))"
self.master_secret = os.environ.get('SETTINGS_ENCRYPTION_KEY')
if not self.master_secret:
raise ValueError(
"SETTINGS_ENCRYPTION_KEY environment variable not set. "
"Generate one with: python -c \"import secrets; print(secrets.token_urlsafe(64))\""
)
# Derive encryption key from master secret using PBKDF2HMAC
# This adds an extra layer of security
kdf = PBKDF2HMAC(
algorithm=hashes.SHA256(),
length=32,
salt=b'systemsettings', # Fixed salt (OK for key derivation from strong secret)
iterations=100000,
backend=default_backend()
)
key = base64.urlsafe_b64encode(kdf.derive(self.master_secret.encode()))
self.cipher = Fernet(key)
def encrypt(self, plaintext: str) -> str:
"""
Encrypt a plaintext string.
Args:
plaintext: The string to encrypt
Returns:
Base64-encoded encrypted string
"""
if not plaintext:
return ""
encrypted_bytes = self.cipher.encrypt(plaintext.encode())
return encrypted_bytes.decode('utf-8')
def decrypt(self, encrypted: str) -> str:
"""
Decrypt an encrypted string.
Args:
encrypted: The base64-encoded encrypted string
Returns:
Decrypted plaintext string
Raises:
cryptography.fernet.InvalidToken: If decryption fails (wrong key or corrupted data)
"""
if not encrypted:
return ""
decrypted_bytes = self.cipher.decrypt(encrypted.encode())
return decrypted_bytes.decode('utf-8')
def is_encrypted(self, value: str) -> bool:
"""
Check if a value appears to be encrypted (starts with Fernet token format).
This is a heuristic check - not 100% reliable but useful for validation.
Args:
value: String to check
Returns:
True if value looks like a Fernet token
"""
if not value:
return False
# Fernet tokens are base64-encoded and start with version byte (gAAAAA...)
# They're always > 60 characters
try:
return len(value) > 60 and value.startswith('gAAAAA')
except:
return False
# Global encryption service instance
# Initialize on module import so it fails fast if encryption key is missing
try:
encryption_service = EncryptionService()
except ValueError as e:
print(f"WARNING: {e}")
print("Encryption service will not be available.")
encryption_service = None
def get_encryption_service() -> EncryptionService:
"""
Get the global encryption service instance.
Raises:
ValueError: If encryption service is not initialized (missing SETTINGS_ENCRYPTION_KEY)
"""
if encryption_service is None:
raise ValueError(
"Encryption service not initialized. Set SETTINGS_ENCRYPTION_KEY environment variable."
)
return encryption_service

File diff suppressed because it is too large Load Diff

View File

@@ -94,30 +94,6 @@ BEGIN;
-- SECTION 2: Create Core Tables
-- ============================================================================
-- Import Jobs table (must be created before users due to FK reference)
CREATE TABLE IF NOT EXISTS import_jobs (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
filename VARCHAR NOT NULL,
status importjobstatus NOT NULL DEFAULT 'processing',
total_rows INTEGER DEFAULT 0,
processed_rows INTEGER DEFAULT 0,
success_count INTEGER DEFAULT 0,
error_count INTEGER DEFAULT 0,
error_log JSONB DEFAULT '[]'::jsonb,
-- WordPress import enhancements
field_mapping JSONB DEFAULT '{}'::jsonb,
wordpress_metadata JSONB DEFAULT '{}'::jsonb,
imported_user_ids JSONB DEFAULT '[]'::jsonb,
rollback_at TIMESTAMP WITH TIME ZONE,
rollback_by UUID, -- Will be updated with FK after users table exists
started_by UUID, -- Will be updated with FK after users table exists
started_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
completed_at TIMESTAMP WITH TIME ZONE
);
-- Users table
CREATE TABLE IF NOT EXISTS users (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
@@ -127,7 +103,6 @@ CREATE TABLE IF NOT EXISTS users (
password_hash VARCHAR NOT NULL,
email_verified BOOLEAN NOT NULL DEFAULT FALSE,
email_verification_token VARCHAR UNIQUE,
email_verification_expires TIMESTAMP WITH TIME ZONE,
-- Personal Information
first_name VARCHAR NOT NULL,
@@ -138,6 +113,7 @@ CREATE TABLE IF NOT EXISTS users (
state VARCHAR(2),
zipcode VARCHAR(10),
date_of_birth DATE,
bio TEXT,
-- Profile
profile_photo_url VARCHAR,
@@ -161,44 +137,20 @@ CREATE TABLE IF NOT EXISTS users (
-- Status & Role
status userstatus NOT NULL DEFAULT 'pending_email',
role userrole NOT NULL DEFAULT 'guest',
role_id UUID, -- For dynamic RBAC
role_id UUID, -- For dynamic RBAC (added in later migration)
-- Newsletter Preferences
newsletter_subscribed BOOLEAN DEFAULT TRUE,
newsletter_publish_name BOOLEAN DEFAULT FALSE NOT NULL,
newsletter_publish_photo BOOLEAN DEFAULT FALSE NOT NULL,
newsletter_publish_birthday BOOLEAN DEFAULT FALSE NOT NULL,
newsletter_publish_none BOOLEAN DEFAULT FALSE NOT NULL,
-- Volunteer Interests
volunteer_interests JSONB DEFAULT '[]'::jsonb,
-- Scholarship Request
scholarship_requested BOOLEAN DEFAULT FALSE NOT NULL,
scholarship_reason TEXT,
-- Directory Settings
show_in_directory BOOLEAN DEFAULT FALSE NOT NULL,
directory_email VARCHAR,
directory_bio TEXT,
directory_address VARCHAR,
directory_phone VARCHAR,
directory_dob DATE,
directory_partner_name VARCHAR,
-- Password Reset
password_reset_token VARCHAR,
password_reset_expires TIMESTAMP WITH TIME ZONE,
force_password_change BOOLEAN DEFAULT FALSE NOT NULL,
-- Terms of Service
accepts_tos BOOLEAN DEFAULT FALSE NOT NULL,
tos_accepted_at TIMESTAMP WITH TIME ZONE,
-- Rejection Tracking
rejection_reason TEXT,
rejected_at TIMESTAMP WITH TIME ZONE,
rejected_by UUID REFERENCES users(id),
-- Membership
member_since DATE,
accepts_tos BOOLEAN DEFAULT FALSE,
tos_accepted_at TIMESTAMP WITH TIME ZONE,
newsletter_subscribed BOOLEAN DEFAULT TRUE,
-- Reminder Tracking
-- Reminder Tracking (from migration 004)
email_verification_reminders_sent INTEGER DEFAULT 0 NOT NULL,
last_email_verification_reminder_at TIMESTAMP WITH TIME ZONE,
event_attendance_reminders_sent INTEGER DEFAULT 0 NOT NULL,
@@ -208,21 +160,12 @@ CREATE TABLE IF NOT EXISTS users (
renewal_reminders_sent INTEGER DEFAULT 0 NOT NULL,
last_renewal_reminder_at TIMESTAMP WITH TIME ZONE,
-- Rejection Tracking
rejection_reason TEXT,
rejected_at TIMESTAMP WITH TIME ZONE,
rejected_by UUID REFERENCES users(id),
-- WordPress Import Tracking
import_source VARCHAR(50),
import_job_id UUID REFERENCES import_jobs(id),
wordpress_user_id BIGINT,
wordpress_registered_date TIMESTAMP WITH TIME ZONE,
-- Role Change Audit Trail
role_changed_at TIMESTAMP WITH TIME ZONE,
role_changed_by UUID REFERENCES users(id) ON DELETE SET NULL,
-- Timestamps
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
@@ -312,23 +255,11 @@ CREATE TABLE IF NOT EXISTS subscription_plans (
name VARCHAR NOT NULL,
description TEXT,
price_cents INTEGER NOT NULL,
billing_cycle VARCHAR NOT NULL DEFAULT 'yearly',
stripe_price_id VARCHAR, -- Legacy, deprecated
billing_cycle VARCHAR NOT NULL DEFAULT 'annual',
-- Configuration
active BOOLEAN NOT NULL DEFAULT TRUE,
-- Custom billing cycle fields (for recurring date ranges like Jan 1 - Dec 31)
custom_cycle_enabled BOOLEAN DEFAULT FALSE NOT NULL,
custom_cycle_start_month INTEGER,
custom_cycle_start_day INTEGER,
custom_cycle_end_month INTEGER,
custom_cycle_end_day INTEGER,
-- Dynamic pricing fields
minimum_price_cents INTEGER DEFAULT 3000 NOT NULL,
suggested_price_cents INTEGER,
allow_donation BOOLEAN DEFAULT TRUE NOT NULL,
features JSONB DEFAULT '[]'::jsonb,
-- Timestamps
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
@@ -350,21 +281,13 @@ CREATE TABLE IF NOT EXISTS subscriptions (
status subscriptionstatus DEFAULT 'active',
start_date TIMESTAMP WITH TIME ZONE NOT NULL,
end_date TIMESTAMP WITH TIME ZONE,
next_billing_date TIMESTAMP WITH TIME ZONE,
-- Payment Details
amount_paid_cents INTEGER,
base_subscription_cents INTEGER NOT NULL,
donation_cents INTEGER DEFAULT 0 NOT NULL,
-- Stripe transaction metadata (for validation and audit)
stripe_payment_intent_id VARCHAR,
stripe_charge_id VARCHAR,
stripe_invoice_id VARCHAR,
payment_completed_at TIMESTAMP WITH TIME ZONE,
card_last4 VARCHAR(4),
card_brand VARCHAR(20),
stripe_receipt_url VARCHAR,
-- Manual Payment Support
manual_payment BOOLEAN DEFAULT FALSE NOT NULL,
manual_payment_notes TEXT,
@@ -396,14 +319,6 @@ CREATE TABLE IF NOT EXISTS donations (
stripe_payment_intent_id VARCHAR,
payment_method VARCHAR,
-- Stripe transaction metadata (for validation and audit)
stripe_charge_id VARCHAR,
stripe_customer_id VARCHAR,
payment_completed_at TIMESTAMP WITH TIME ZONE,
card_last4 VARCHAR(4),
card_brand VARCHAR(20),
stripe_receipt_url VARCHAR,
-- Metadata
notes TEXT,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
@@ -530,7 +445,7 @@ CREATE TABLE IF NOT EXISTS storage_usage (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
total_bytes_used BIGINT NOT NULL DEFAULT 0,
max_bytes_allowed BIGINT NOT NULL DEFAULT 1073741824, -- 1GB
max_bytes_allowed BIGINT NOT NULL DEFAULT 10737418240, -- 10GB
last_updated TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
@@ -551,10 +466,29 @@ CREATE TABLE IF NOT EXISTS user_invitations (
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Add FK constraints to import_jobs (now that users table exists)
ALTER TABLE import_jobs
ADD CONSTRAINT fk_import_jobs_rollback_by FOREIGN KEY (rollback_by) REFERENCES users(id),
ADD CONSTRAINT fk_import_jobs_started_by FOREIGN KEY (started_by) REFERENCES users(id);
-- Import Jobs table
CREATE TABLE IF NOT EXISTS import_jobs (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
filename VARCHAR NOT NULL,
status importjobstatus NOT NULL DEFAULT 'processing',
total_rows INTEGER DEFAULT 0,
processed_rows INTEGER DEFAULT 0,
success_count INTEGER DEFAULT 0,
error_count INTEGER DEFAULT 0,
error_log JSONB DEFAULT '[]'::jsonb,
-- WordPress import enhancements
field_mapping JSONB DEFAULT '{}'::jsonb,
wordpress_metadata JSONB DEFAULT '{}'::jsonb,
imported_user_ids JSONB DEFAULT '[]'::jsonb,
rollback_at TIMESTAMP WITH TIME ZONE,
rollback_by UUID REFERENCES users(id),
started_by UUID REFERENCES users(id),
started_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
completed_at TIMESTAMP WITH TIME ZONE
);
-- Import Rollback Audit table (for tracking rollback operations)
CREATE TABLE IF NOT EXISTS import_rollback_audit (
@@ -608,18 +542,12 @@ CREATE INDEX IF NOT EXISTS idx_subscriptions_user_id ON subscriptions(user_id);
CREATE INDEX IF NOT EXISTS idx_subscriptions_plan_id ON subscriptions(plan_id);
CREATE INDEX IF NOT EXISTS idx_subscriptions_status ON subscriptions(status);
CREATE INDEX IF NOT EXISTS idx_subscriptions_stripe_subscription_id ON subscriptions(stripe_subscription_id);
CREATE INDEX IF NOT EXISTS idx_subscriptions_payment_intent ON subscriptions(stripe_payment_intent_id);
CREATE INDEX IF NOT EXISTS idx_subscriptions_charge_id ON subscriptions(stripe_charge_id);
CREATE INDEX IF NOT EXISTS idx_subscriptions_invoice_id ON subscriptions(stripe_invoice_id);
-- Donations indexes
CREATE INDEX IF NOT EXISTS idx_donation_user ON donations(user_id);
CREATE INDEX IF NOT EXISTS idx_donation_type ON donations(donation_type);
CREATE INDEX IF NOT EXISTS idx_donation_status ON donations(status);
CREATE INDEX IF NOT EXISTS idx_donation_created ON donations(created_at);
CREATE INDEX IF NOT EXISTS idx_donation_payment_intent ON donations(stripe_payment_intent_id);
CREATE INDEX IF NOT EXISTS idx_donation_charge_id ON donations(stripe_charge_id);
CREATE INDEX IF NOT EXISTS idx_donation_customer_id ON donations(stripe_customer_id);
-- Import Jobs indexes
CREATE INDEX IF NOT EXISTS idx_import_jobs_status ON import_jobs(status);
@@ -659,7 +587,7 @@ INSERT INTO storage_usage (id, total_bytes_used, max_bytes_allowed, last_updated
SELECT
gen_random_uuid(),
0,
1073741824, -- 1GB
10737418240, -- 10GB
CURRENT_TIMESTAMP
WHERE NOT EXISTS (SELECT 1 FROM storage_usage);

117
models.py
View File

@@ -44,13 +44,6 @@ class DonationStatus(enum.Enum):
completed = "completed"
failed = "failed"
class PaymentMethodType(enum.Enum):
card = "card"
cash = "cash"
bank_transfer = "bank_transfer"
check = "check"
class User(Base):
__tablename__ = "users"
@@ -144,17 +137,6 @@ class User(Base):
wordpress_user_id = Column(BigInteger, nullable=True, comment="Original WordPress user ID")
wordpress_registered_date = Column(DateTime(timezone=True), nullable=True, comment="Original WordPress registration date")
# Role Change Audit Trail
role_changed_at = Column(DateTime(timezone=True), nullable=True, comment="Timestamp when role was last changed")
role_changed_by = Column(UUID(as_uuid=True), ForeignKey('users.id', ondelete='SET NULL'), nullable=True, comment="Admin who changed the role")
# Stripe Customer ID - Centralized for payment method management
stripe_customer_id = Column(String, nullable=True, index=True, comment="Stripe Customer ID for payment method management")
# Dynamic Registration Form - Custom field responses
custom_registration_data = Column(JSON, default=dict, nullable=False,
comment="Dynamic registration field responses stored as JSON for custom form fields")
created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
@@ -163,53 +145,6 @@ class User(Base):
events_created = relationship("Event", back_populates="creator")
rsvps = relationship("EventRSVP", back_populates="user")
subscriptions = relationship("Subscription", back_populates="user", foreign_keys="Subscription.user_id")
role_changer = relationship("User", foreign_keys=[role_changed_by], remote_side="User.id", post_update=True)
payment_methods = relationship("PaymentMethod", back_populates="user", foreign_keys="PaymentMethod.user_id")
class PaymentMethod(Base):
"""Stored payment methods for users (Stripe or manual records)"""
__tablename__ = "payment_methods"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
user_id = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
# Stripe payment method reference
stripe_payment_method_id = Column(String, nullable=True, unique=True, index=True, comment="Stripe pm_xxx reference")
# Card details (stored for display purposes - PCI compliant)
card_brand = Column(String(20), nullable=True, comment="Card brand: visa, mastercard, amex, etc.")
card_last4 = Column(String(4), nullable=True, comment="Last 4 digits of card")
card_exp_month = Column(Integer, nullable=True, comment="Card expiration month")
card_exp_year = Column(Integer, nullable=True, comment="Card expiration year")
card_funding = Column(String(20), nullable=True, comment="Card funding type: credit, debit, prepaid")
# Payment type classification
payment_type = Column(SQLEnum(PaymentMethodType), default=PaymentMethodType.card, nullable=False)
# Status flags
is_default = Column(Boolean, default=False, nullable=False, comment="Whether this is the default payment method for auto-renewals")
is_active = Column(Boolean, default=True, nullable=False, comment="Soft delete flag - False means removed")
is_manual = Column(Boolean, default=False, nullable=False, comment="True for manually recorded methods (cash/check)")
# Manual payment notes (for cash/check records)
manual_notes = Column(Text, nullable=True, comment="Admin notes for manual payment methods")
# Audit trail
created_by = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True, comment="Admin who added this on behalf of user")
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False)
updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)
# Relationships
user = relationship("User", back_populates="payment_methods", foreign_keys=[user_id])
creator = relationship("User", foreign_keys=[created_by])
# Composite index for efficient queries
__table_args__ = (
Index('idx_payment_method_user_default', 'user_id', 'is_default'),
Index('idx_payment_method_active', 'user_id', 'is_active'),
)
class Event(Base):
__tablename__ = "events"
@@ -298,15 +233,6 @@ class Subscription(Base):
donation_cents = Column(Integer, default=0, nullable=False) # Additional donation amount
# Note: amount_paid_cents = base_subscription_cents + donation_cents
# Stripe transaction metadata (for validation and audit)
stripe_payment_intent_id = Column(String, nullable=True, index=True) # Initial payment transaction ID
stripe_charge_id = Column(String, nullable=True, index=True) # Actual charge reference
stripe_invoice_id = Column(String, nullable=True, index=True) # Invoice reference
payment_completed_at = Column(DateTime(timezone=True), nullable=True) # Exact payment timestamp from Stripe
card_last4 = Column(String(4), nullable=True) # Last 4 digits of card
card_brand = Column(String(20), nullable=True) # Visa, Mastercard, etc.
stripe_receipt_url = Column(String, nullable=True) # Link to Stripe receipt
# Manual payment fields
manual_payment = Column(Boolean, default=False, nullable=False) # Whether this was a manual offline payment
manual_payment_notes = Column(Text, nullable=True) # Admin notes about the payment
@@ -338,17 +264,9 @@ class Donation(Base):
# Payment details
stripe_checkout_session_id = Column(String, nullable=True)
stripe_payment_intent_id = Column(String, nullable=True, index=True)
stripe_payment_intent_id = Column(String, nullable=True)
payment_method = Column(String, nullable=True) # card, bank_transfer, etc.
# Stripe transaction metadata (for validation and audit)
stripe_charge_id = Column(String, nullable=True, index=True) # Actual charge reference
stripe_customer_id = Column(String, nullable=True, index=True) # Customer ID if created
payment_completed_at = Column(DateTime(timezone=True), nullable=True) # Exact payment timestamp from Stripe
card_last4 = Column(String(4), nullable=True) # Last 4 digits of card
card_brand = Column(String(20), nullable=True) # Visa, Mastercard, etc.
stripe_receipt_url = Column(String, nullable=True) # Link to Stripe receipt
# Metadata
notes = Column(Text, nullable=True)
created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
@@ -591,36 +509,3 @@ class ImportRollbackAudit(Base):
# Relationships
import_job = relationship("ImportJob")
admin_user = relationship("User", foreign_keys=[rolled_back_by])
# ============================================================
# System Settings Models
# ============================================================
class SettingType(enum.Enum):
plaintext = "plaintext"
encrypted = "encrypted"
json = "json"
class SystemSettings(Base):
"""System-wide configuration settings stored in database"""
__tablename__ = "system_settings"
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
setting_key = Column(String(100), unique=True, nullable=False, index=True)
setting_value = Column(Text, nullable=True)
setting_type = Column(SQLEnum(SettingType), default=SettingType.plaintext, nullable=False)
description = Column(Text, nullable=True)
updated_by = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True)
created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False)
updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)
is_sensitive = Column(Boolean, default=False, nullable=False)
# Relationships
updater = relationship("User", foreign_keys=[updated_by])
# Index on updated_at for audit queries
__table_args__ = (
Index('idx_system_settings_updated_at', 'updated_at'),
)

View File

@@ -11,9 +11,11 @@ from datetime import datetime, timezone, timedelta
# Load environment variables
load_dotenv()
# NOTE: Stripe credentials are now database-driven
# These .env fallbacks are kept for backward compatibility only
# The actual credentials are loaded dynamically from system_settings table
# Initialize Stripe with secret key
stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
# Stripe webhook secret for signature verification
STRIPE_WEBHOOK_SECRET = os.getenv("STRIPE_WEBHOOK_SECRET")
def create_checkout_session(
user_id: str,
@@ -21,15 +23,11 @@ def create_checkout_session(
plan_id: str,
stripe_price_id: str,
success_url: str,
cancel_url: str,
db = None
cancel_url: str
):
"""
Create a Stripe Checkout session for subscription payment.
Args:
db: Database session (optional, for reading Stripe credentials from database)
Args:
user_id: User's UUID
user_email: User's email address
@@ -41,28 +39,6 @@ def create_checkout_session(
Returns:
dict: Checkout session object with session ID and URL
"""
# Load Stripe API key from database if available
if db:
try:
# Import here to avoid circular dependency
from models import SystemSettings, SettingType
from encryption_service import get_encryption_service
setting = db.query(SystemSettings).filter(
SystemSettings.setting_key == 'stripe_secret_key'
).first()
if setting and setting.setting_value:
encryption_service = get_encryption_service()
stripe.api_key = encryption_service.decrypt(setting.setting_value)
except Exception as e:
# Fallback to .env if database read fails
print(f"Failed to read Stripe key from database: {e}")
stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
else:
# Fallback to .env if no db session
stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
try:
# Create Checkout Session
checkout_session = stripe.checkout.Session.create(
@@ -98,14 +74,13 @@ def create_checkout_session(
raise Exception(f"Stripe error: {str(e)}")
def verify_webhook_signature(payload: bytes, sig_header: str, db=None) -> dict:
def verify_webhook_signature(payload: bytes, sig_header: str) -> dict:
"""
Verify Stripe webhook signature and construct event.
Args:
payload: Raw webhook payload bytes
sig_header: Stripe signature header
db: Database session (optional, for reading webhook secret from database)
Returns:
dict: Verified webhook event
@@ -113,32 +88,9 @@ def verify_webhook_signature(payload: bytes, sig_header: str, db=None) -> dict:
Raises:
ValueError: If signature verification fails
"""
# Load webhook secret from database if available
webhook_secret = None
if db:
try:
from models import SystemSettings
from encryption_service import get_encryption_service
setting = db.query(SystemSettings).filter(
SystemSettings.setting_key == 'stripe_webhook_secret'
).first()
if setting and setting.setting_value:
encryption_service = get_encryption_service()
webhook_secret = encryption_service.decrypt(setting.setting_value)
except Exception as e:
print(f"Failed to read webhook secret from database: {e}")
webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
else:
webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
if not webhook_secret:
raise ValueError("STRIPE_WEBHOOK_SECRET not configured")
try:
event = stripe.Webhook.construct_event(
payload, sig_header, webhook_secret
payload, sig_header, STRIPE_WEBHOOK_SECRET
)
return event
except ValueError as e:

View File

@@ -327,38 +327,6 @@ PERMISSIONS = [
"module": "gallery"
},
# ========== PAYMENT METHODS MODULE ==========
{
"code": "payment_methods.view",
"name": "View Payment Methods",
"description": "View user payment methods (masked)",
"module": "payment_methods"
},
{
"code": "payment_methods.view_sensitive",
"name": "View Sensitive Payment Details",
"description": "View full payment method details including Stripe IDs (requires password)",
"module": "payment_methods"
},
{
"code": "payment_methods.create",
"name": "Create Payment Methods",
"description": "Add payment methods on behalf of users",
"module": "payment_methods"
},
{
"code": "payment_methods.delete",
"name": "Delete Payment Methods",
"description": "Delete user payment methods",
"module": "payment_methods"
},
{
"code": "payment_methods.set_default",
"name": "Set Default Payment Method",
"description": "Set a user's default payment method",
"module": "payment_methods"
},
# ========== SETTINGS MODULE ==========
{
"code": "settings.view",
@@ -485,10 +453,6 @@ DEFAULT_ROLE_PERMISSIONS = {
"gallery.edit",
"gallery.delete",
"gallery.moderate",
"payment_methods.view",
"payment_methods.create",
"payment_methods.delete",
"payment_methods.set_default",
"settings.view",
"settings.edit",
"settings.email_templates",
@@ -496,36 +460,6 @@ DEFAULT_ROLE_PERMISSIONS = {
"settings.logs",
],
UserRole.finance: [
# Finance role has all admin permissions plus sensitive payment access
"users.view",
"users.export",
"events.view",
"events.rsvps",
"events.calendar_export",
"subscriptions.view",
"subscriptions.create",
"subscriptions.edit",
"subscriptions.cancel",
"subscriptions.activate",
"subscriptions.plans",
"financials.view",
"financials.create",
"financials.edit",
"financials.delete",
"financials.export",
"financials.payments",
"newsletters.view",
"bylaws.view",
"gallery.view",
"payment_methods.view",
"payment_methods.view_sensitive", # Finance can view sensitive payment details
"payment_methods.create",
"payment_methods.delete",
"payment_methods.set_default",
"settings.view",
],
# Superadmin gets all permissions automatically in code,
# so we don't need to explicitly assign them
UserRole.superadmin: []

View File

@@ -35,29 +35,6 @@ class R2Storage:
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': ['.xlsx']
}
# Branding assets (logo and favicon)
ALLOWED_BRANDING_TYPES = {
'image/jpeg': ['.jpg', '.jpeg'],
'image/png': ['.png'],
'image/webp': ['.webp'],
'image/svg+xml': ['.svg']
}
ALLOWED_FAVICON_TYPES = {
'image/x-icon': ['.ico'],
'image/vnd.microsoft.icon': ['.ico'],
'image/png': ['.png'],
'image/svg+xml': ['.svg']
}
# CSV files for imports
ALLOWED_CSV_TYPES = {
'text/csv': ['.csv'],
'text/plain': ['.csv'], # Some systems report CSV as text/plain
'application/csv': ['.csv'],
'application/vnd.ms-excel': ['.csv'], # Old Excel type sometimes used for CSV
}
def __init__(self):
"""Initialize R2 client with credentials from environment"""
self.account_id = os.getenv('R2_ACCOUNT_ID')
@@ -248,127 +225,6 @@ class R2Storage:
except ClientError:
return False
async def upload_bytes(
self,
content: bytes,
folder: str,
filename: str,
content_type: str = 'text/csv'
) -> tuple[str, str, int]:
"""
Upload raw bytes to R2 storage (useful for CSV imports)
Args:
content: Raw bytes to upload
folder: Folder path in R2 (e.g., 'imports/job-id')
filename: Original filename
content_type: MIME type of the content
Returns:
tuple: (public_url, object_key, file_size_bytes)
Raises:
HTTPException: If upload fails
"""
try:
file_size = len(content)
# Generate unique filename preserving original extension
file_extension = Path(filename).suffix.lower() or '.csv'
unique_filename = f"{uuid.uuid4()}{file_extension}"
object_key = f"{folder}/{unique_filename}"
# Upload to R2
self.client.put_object(
Bucket=self.bucket_name,
Key=object_key,
Body=content,
ContentType=content_type,
ContentLength=file_size
)
# Generate public URL
public_url = self.get_public_url(object_key)
return public_url, object_key, file_size
except ClientError as e:
raise HTTPException(
status_code=500,
detail=f"Failed to upload to R2: {str(e)}"
)
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"Upload error: {str(e)}"
)
async def download_file(self, object_key: str) -> bytes:
"""
Download a file from R2 storage
Args:
object_key: The S3 object key (path) of the file
Returns:
bytes: File content
Raises:
HTTPException: If download fails
"""
try:
response = self.client.get_object(
Bucket=self.bucket_name,
Key=object_key
)
return response['Body'].read()
except ClientError as e:
if e.response['Error']['Code'] == 'NoSuchKey':
raise HTTPException(status_code=404, detail="File not found in storage")
raise HTTPException(
status_code=500,
detail=f"Failed to download file from R2: {str(e)}"
)
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"Download error: {str(e)}"
)
async def delete_multiple(self, object_keys: list[str]) -> bool:
"""
Delete multiple files from R2 storage
Args:
object_keys: List of S3 object keys to delete
Returns:
bool: True if successful
Raises:
HTTPException: If deletion fails
"""
if not object_keys:
return True
try:
# R2/S3 delete_objects accepts up to 1000 keys at once
objects = [{'Key': key} for key in object_keys if key]
if objects:
self.client.delete_objects(
Bucket=self.bucket_name,
Delete={'Objects': objects}
)
return True
except ClientError as e:
raise HTTPException(
status_code=500,
detail=f"Failed to delete files from R2: {str(e)}"
)
# Singleton instance
_r2_storage = None

View File

@@ -2,7 +2,7 @@
"""
Permission Seeding Script for Dynamic RBAC System
This script populates the database with 65 granular permissions and assigns them
This script populates the database with 59 granular permissions and assigns them
to the appropriate dynamic roles (not the old enum roles).
Usage:
@@ -33,7 +33,7 @@ engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# ============================================================
# Permission Definitions (65 permissions across 11 modules)
# Permission Definitions (59 permissions across 10 modules)
# ============================================================
PERMISSIONS = [
@@ -116,55 +116,6 @@ PERMISSIONS = [
{"code": "permissions.assign", "name": "Assign Permissions", "description": "Assign permissions to roles", "module": "permissions"},
{"code": "permissions.manage_roles", "name": "Manage Roles", "description": "Create and manage user roles", "module": "permissions"},
{"code": "permissions.audit", "name": "View Permission Audit Log", "description": "View permission change audit logs", "module": "permissions"},
# ========== PAYMENT METHODS MODULE (5) ==========
{"code": "payment_methods.view", "name": "View Payment Methods", "description": "View user payment methods (masked)", "module": "payment_methods"},
{"code": "payment_methods.view_sensitive", "name": "View Sensitive Payment Details", "description": "View full Stripe payment method IDs (requires password)", "module": "payment_methods"},
{"code": "payment_methods.create", "name": "Create Payment Methods", "description": "Add payment methods on behalf of users", "module": "payment_methods"},
{"code": "payment_methods.delete", "name": "Delete Payment Methods", "description": "Remove user payment methods", "module": "payment_methods"},
{"code": "payment_methods.set_default", "name": "Set Default Payment Method", "description": "Set default payment method for users", "module": "payment_methods"},
# ========== REGISTRATION MODULE (2) ==========
{"code": "registration.view", "name": "View Registration Settings", "description": "View registration form schema and settings", "module": "registration"},
{"code": "registration.manage", "name": "Manage Registration Form", "description": "Edit registration form schema, steps, and fields", "module": "registration"},
# ========== DIRECTORY MODULE (2) ==========
{"code": "directory.view", "name": "View Directory Settings", "description": "View member directory field configuration", "module": "directory"},
{"code": "directory.manage", "name": "Manage Directory Fields", "description": "Enable/disable directory fields shown in Profile and Directory pages", "module": "directory"},
]
# Default system roles that must exist
DEFAULT_ROLES = [
{
"code": "guest",
"name": "Guest",
"description": "Default role for new registrations with no special permissions",
"is_system_role": True
},
{
"code": "member",
"name": "Member",
"description": "Active paying members with access to member-only content",
"is_system_role": True
},
{
"code": "finance",
"name": "Finance",
"description": "Financial management role with access to payments, subscriptions, and reports",
"is_system_role": True
},
{
"code": "admin",
"name": "Admin",
"description": "Board members with full management access except RBAC",
"is_system_role": True
},
{
"code": "superadmin",
"name": "Superadmin",
"description": "Full system access including RBAC management",
"is_system_role": True
},
]
# Default permission assignments for dynamic roles
@@ -185,9 +136,6 @@ DEFAULT_ROLE_PERMISSIONS = {
"subscriptions.cancel", "subscriptions.activate", "subscriptions.plans",
"subscriptions.export",
"donations.view", "donations.export",
# Payment methods - finance can view sensitive details
"payment_methods.view", "payment_methods.view_sensitive",
"payment_methods.create", "payment_methods.delete", "payment_methods.set_default",
],
"admin": [
@@ -209,13 +157,6 @@ DEFAULT_ROLE_PERMISSIONS = {
"gallery.view", "gallery.upload", "gallery.edit", "gallery.delete", "gallery.moderate",
"settings.view", "settings.edit", "settings.email_templates", "settings.storage",
"settings.logs",
# Payment methods - admin can manage but not view sensitive details
"payment_methods.view", "payment_methods.create",
"payment_methods.delete", "payment_methods.set_default",
# Registration form management
"registration.view", "registration.manage",
# Directory configuration
"directory.view", "directory.manage",
],
"superadmin": [
@@ -255,34 +196,7 @@ def seed_permissions():
print(f"\n⚠️ WARNING: Tables not fully cleared! Stopping.")
return
# Step 2: Create default system roles
print(f"\n👤 Creating {len(DEFAULT_ROLES)} system roles...")
role_map = {}
for role_data in DEFAULT_ROLES:
# Check if role already exists
existing_role = db.query(Role).filter(Role.code == role_data["code"]).first()
if existing_role:
print(f"{role_data['name']}: Already exists, updating...")
existing_role.name = role_data["name"]
existing_role.description = role_data["description"]
existing_role.is_system_role = role_data["is_system_role"]
role_map[role_data["code"]] = existing_role
else:
print(f"{role_data['name']}: Creating...")
role = Role(
code=role_data["code"],
name=role_data["name"],
description=role_data["description"],
is_system_role=role_data["is_system_role"]
)
db.add(role)
role_map[role_data["code"]] = role
db.commit()
print(f"✓ Created/updated {len(DEFAULT_ROLES)} system roles")
# Step 3: Create permissions
# Step 2: Create permissions
print(f"\n📝 Creating {len(PERMISSIONS)} permissions...")
permission_map = {} # Map code to permission object
@@ -299,13 +213,13 @@ def seed_permissions():
db.commit()
print(f"✓ Created {len(PERMISSIONS)} permissions")
# Step 4: Verify roles exist
print("\n🔍 Verifying dynamic roles...")
# Step 3: Get all roles from database
print("\n🔍 Fetching dynamic roles...")
roles = db.query(Role).all()
role_map = {role.code: role for role in roles}
print(f"✓ Found {len(roles)} roles: {', '.join(role_map.keys())}")
# Step 5: Assign permissions to roles
# Step 4: Assign permissions to roles
print("\n🔐 Assigning permissions to roles...")
from models import UserRole # Import for enum mapping
@@ -344,7 +258,7 @@ def seed_permissions():
db.commit()
print(f"{role.name}: Assigned {len(permission_codes)} permissions")
# Step 6: Summary
# Step 5: Summary
print("\n" + "=" * 80)
print("📊 SEEDING SUMMARY")
print("=" * 80)
@@ -359,8 +273,7 @@ def seed_permissions():
for module, count in sorted(modules.items()):
print(f"{module.capitalize()}: {count} permissions")
print(f"\nTotal system roles created: {len(DEFAULT_ROLES)}")
print(f"Total permissions created: {len(PERMISSIONS)}")
print(f"\nTotal permissions created: {len(PERMISSIONS)}")
print(f"Total role-permission mappings: {total_assigned}")
print("\n✅ Permission seeding completed successfully!")
print("\nNext step: Restart backend server")

4948
server.py

File diff suppressed because it is too large Load Diff

View File

@@ -10,127 +10,21 @@ Key Features:
- Validate and standardize user data (DOB, phone numbers)
- Generate smart status suggestions based on approval and subscription data
- Comprehensive data quality analysis and error reporting
- Multi-file import support (Users, Members, Payments CSVs)
- Field mapping based on Meta Name Reference document
Author: Claude Code
Date: 2025-12-24
Updated: 2026-02-03 - Added comprehensive multi-file import support
"""
import csv
import re
import logging
from datetime import datetime
from typing import Dict, List, Optional, Tuple, Any
from typing import Dict, List, Optional, Tuple
import phpserialize
import pandas as pd
logger = logging.getLogger(__name__)
# ============================================================================
# Meta Name Reference Field Mapping (from client's WordPress export)
# ============================================================================
# Maps WordPress meta names to our database fields
# Format: 'wordpress_meta_name': ('db_field', 'field_type', 'parser_function')
META_FIELD_MAPPING = {
# Basic user info
'first_name': ('first_name', 'string', None),
'last_name': ('last_name', 'string', None),
'user_email': ('email', 'string', 'lowercase'),
'user_login': ('username', 'string', None), # For reference only
'address': ('address', 'string', None),
'city': ('city', 'string', None),
'state': ('state', 'string', None),
'zipcode': ('zipcode', 'string', None),
'cell_phone': ('phone', 'string', 'phone'),
'date_of_birth': ('date_of_birth', 'date', 'date_mmddyyyy'),
# Partner info
'partner_first_name': ('partner_first_name', 'string', None),
'partner_last_name': ('partner_last_name', 'string', None),
'partner_membership_status': ('partner_is_member', 'boolean', 'yes_no'),
'partner_membership_consideration': ('partner_plan_to_become_member', 'boolean', 'yes_no'),
# Newsletter preferences
'newsletter_consent': ('newsletter_subscribed', 'boolean', 'yes_no'),
'newsletter_checklist': ('newsletter_preferences', 'multi_value', 'newsletter_checklist'),
# Referral and lead sources
'member_referral': ('referred_by_member_name', 'string', None),
'referral_source': ('lead_sources', 'multi_value', 'lead_sources'),
# Volunteer interests
'volunteer_checklist': ('volunteer_interests', 'multi_value', 'volunteer_checklist'),
# Scholarship
'scholarship_request': ('scholarship_requested', 'boolean', 'yes_no'),
'scholarship_reason': ('scholarship_reason', 'string', None),
# Directory settings
'members_directory_filter': ('show_in_directory', 'boolean', 'yes_no'),
'md_display_name': ('custom_registration_data.directory_display_name', 'custom', None),
'md_email': ('directory_email', 'string', None),
'description': ('directory_bio', 'string', None),
'md_adress': ('directory_address', 'string', None), # Note: typo in WordPress
'md_phone': ('directory_phone', 'string', None),
'md_dob': ('directory_dob', 'date', 'date_mmddyyyy'),
'md_partner_name': ('directory_partner_name', 'string', None),
'md_avatar': ('profile_photo_url', 'string', None),
# Metadata
'member_since': ('member_since', 'date', 'date_various'),
'user_registered': ('wordpress_registered_date', 'datetime', 'datetime_mysql'),
'ID': ('wordpress_user_id', 'integer', None),
# Stripe info (from WordPress)
'pms_stripe_customer_id': ('stripe_customer_id', 'string', None),
}
# Newsletter checklist option mapping
NEWSLETTER_CHECKLIST_OPTIONS = {
'name': 'newsletter_publish_name',
'photo': 'newsletter_publish_photo',
'birthday': 'newsletter_publish_birthday',
'none': 'newsletter_publish_none',
# Handle various WordPress stored formats
'my name': 'newsletter_publish_name',
'my photo': 'newsletter_publish_photo',
'my birthday': 'newsletter_publish_birthday',
}
# Volunteer interests mapping (WordPress values to our format)
VOLUNTEER_INTERESTS_MAP = {
'events': 'Events',
'fundraising': 'Fundraising',
'communications': 'Communications',
'membership': 'Membership',
'board': 'Board of Directors',
'other': 'Other',
# Handle various WordPress formats
'help with events': 'Events',
'help with fundraising': 'Fundraising',
'help with communications': 'Communications',
'help with membership': 'Membership',
'serve on the board': 'Board of Directors',
}
# Lead sources mapping
LEAD_SOURCES_MAP = {
'current member': 'Current member',
'friend': 'Friend',
'outsmart magazine': 'OutSmart Magazine',
'outsmart': 'OutSmart Magazine',
'search engine': 'Search engine (Google etc.)',
'google': 'Search engine (Google etc.)',
'known about loaf': "I've known about LOAF for a long time",
'long time': "I've known about LOAF for a long time",
'other': 'Other',
}
# ============================================================================
# WordPress Role Mapping Configuration
# ============================================================================
@@ -389,622 +283,6 @@ def validate_dob(dob_str: str) -> Tuple[Optional[datetime], Optional[str]]:
return None, f'Invalid date format: {dob_str} (expected MM/DD/YYYY)'
# ============================================================================
# Enhanced Field Parsers for Meta Name Reference
# ============================================================================
def parse_boolean_yes_no(value: Any) -> bool:
    """
    Interpret a WordPress yes/no style flag as a Python bool.

    Treats yes/true/1/checked/on/y (case-insensitive, whitespace ignored)
    as True; anything else — including None and NaN — as False.
    """
    if value is None:
        return False
    if isinstance(value, float) and pd.isna(value):
        return False
    normalized = str(value).strip().lower()
    truthy_values = {'yes', 'true', '1', 'checked', 'on', 'y'}
    return normalized in truthy_values
def parse_date_various(date_str: Any) -> Optional[datetime]:
    """
    Parse a date string in any of the formats commonly seen in WordPress
    exports (US, ISO, EU, long-form month names, MySQL datetime, and
    month/year variants).

    Returns None for missing/NaN/unparseable values or for dates whose
    year falls outside the plausible 1900..(current year + 1) window.
    """
    if date_str is None:
        return None
    if isinstance(date_str, float) and pd.isna(date_str):
        return None
    text = str(date_str).strip()
    if not text or text.lower() == 'nan':
        return None
    # Order matters: US month-first is tried before EU day-first, so an
    # ambiguous value like 03/04/2020 resolves as March 4th.
    candidate_formats = (
        '%m/%d/%Y',           # US: 01/15/2020
        '%Y-%m-%d',           # ISO: 2020-01-15
        '%d/%m/%Y',           # EU: 15/01/2020
        '%B %d, %Y',          # Full: January 15, 2020
        '%b %d, %Y',          # Short: Jan 15, 2020
        '%Y-%m-%d %H:%M:%S',  # MySQL datetime
        '%m/%Y',              # Month/Year: 01/2020
        '%m-%Y',              # Month-Year: 01-2020
        '%b-%Y',              # Short month-Year: Jan-2020
        '%B-%Y',              # Full month-Year: January-2020
    )
    max_year = datetime.now().year + 1
    for fmt in candidate_formats:
        try:
            parsed = datetime.strptime(text, fmt)
        except ValueError:
            continue
        # Reject implausible years but keep trying the remaining formats
        if 1900 <= parsed.year <= max_year:
            return parsed
    # Quietly note strings long enough to plausibly have been a date
    if len(text) > 3:
        logger.debug(f"Could not parse date: {text}")
    return None
def parse_datetime_mysql(dt_str: Any) -> Optional[datetime]:
    """
    Parse a MySQL-style datetime string (YYYY-MM-DD HH:MM:SS).

    Falls back to the general multi-format date parser when the value
    does not match the MySQL layout; returns None for missing/NaN input.
    """
    if dt_str is None:
        return None
    if isinstance(dt_str, float) and pd.isna(dt_str):
        return None
    text = str(dt_str).strip()
    try:
        return datetime.strptime(text, '%Y-%m-%d %H:%M:%S')
    except ValueError:
        # Hand the original (unstripped) value to the fallback parser
        return parse_date_various(dt_str)
def parse_newsletter_checklist(value: Any) -> Dict[str, bool]:
    """
    Parse the WordPress newsletter checklist field into our
    newsletter_publish_* boolean flags.

    Accepts either a PHP-serialized array or a comma-separated string;
    matching is by substring against NEWSLETTER_CHECKLIST_OPTIONS.
    Missing/NaN/empty input yields all-False flags.
    """
    flags = {
        'newsletter_publish_name': False,
        'newsletter_publish_photo': False,
        'newsletter_publish_birthday': False,
        'newsletter_publish_none': False,
    }
    if value is None or (isinstance(value, float) and pd.isna(value)):
        return flags
    text = str(value).lower().strip()
    if not text or text == 'nan':
        return flags
    # PHP serialized arrays start with 'a:'; on any decode failure we
    # fall through to the comma-separated interpretation below.
    if text.startswith('a:'):
        try:
            decoded = phpserialize.loads(text.encode('utf-8'))
            if isinstance(decoded, dict):
                for raw_key in decoded:
                    key = raw_key.decode('utf-8') if isinstance(raw_key, bytes) else str(raw_key)
                    key = key.lower()
                    for needle, flag_name in NEWSLETTER_CHECKLIST_OPTIONS.items():
                        if needle in key:
                            flags[flag_name] = True
                return flags
        except Exception:
            pass
    # Comma-separated fallback
    for token in (part.strip().lower() for part in text.split(',')):
        for needle, flag_name in NEWSLETTER_CHECKLIST_OPTIONS.items():
            if needle in token:
                flags[flag_name] = True
    return flags
def parse_volunteer_checklist(value: Any) -> List[str]:
    """
    Parse the volunteer interests checklist into a deduplicated list of
    standardized labels.

    Accepts either a PHP-serialized array or a comma-separated string;
    matching is by substring against VOLUNTEER_INTERESTS_MAP. Missing,
    NaN, or empty input yields an empty list.
    """
    if value is None or (isinstance(value, float) and pd.isna(value)):
        return []
    text = str(value).lower().strip()
    if not text or text == 'nan':
        return []
    labels: List[str] = []
    # PHP serialized arrays start with 'a:'; on decode failure fall
    # through to the comma-separated interpretation below.
    if text.startswith('a:'):
        try:
            decoded = phpserialize.loads(text.encode('utf-8'))
            if isinstance(decoded, dict):
                for raw_key in decoded:
                    key = raw_key.decode('utf-8') if isinstance(raw_key, bytes) else str(raw_key)
                    key = key.lower()
                    for needle, label in VOLUNTEER_INTERESTS_MAP.items():
                        if needle in key and label not in labels:
                            labels.append(label)
                return labels
        except Exception:
            pass
    # Comma-separated fallback
    for token in (part.strip().lower() for part in text.split(',')):
        for needle, label in VOLUNTEER_INTERESTS_MAP.items():
            if needle in token and label not in labels:
                labels.append(label)
    return labels
def parse_lead_sources(value: Any) -> List[str]:
    """
    Parse the referral/lead sources field.

    Accepts a PHP-serialized array or a comma-separated string and maps
    each entry to a standardized label via LEAD_SOURCES_MAP; entries that
    match nothing are recorded as 'Other'. The returned list contains
    each label at most once.

    Fixes two dedup defects in the prior version: a repeated source token
    fell through the `label not in sources` guard and was mis-recorded as
    'Other', and 'Other' itself could be appended multiple times.

    Args:
        value: Raw WordPress field value (may be None/NaN).
    Returns:
        List of unique standardized lead source labels.
    """
    if value is None or (isinstance(value, float) and pd.isna(value)):
        return []
    str_val = str(value).lower().strip()
    if not str_val or str_val == 'nan':
        return []
    sources = []
    # Try PHP serialized first ('a:' prefix); fall through to CSV on failure
    if str_val.startswith('a:'):
        try:
            parsed = phpserialize.loads(str_val.encode('utf-8'))
            if isinstance(parsed, dict):
                for key in parsed.keys():
                    key_str = key.decode('utf-8') if isinstance(key, bytes) else str(key)
                    key_lower = key_str.lower()
                    for match_key, label in LEAD_SOURCES_MAP.items():
                        if match_key in key_lower and label not in sources:
                            sources.append(label)
                return sources
        except Exception:
            pass
    # Try comma-separated values
    items = [item.strip().lower() for item in str_val.split(',')]
    for item in items:
        matched = False
        for match_key, label in LEAD_SOURCES_MAP.items():
            if match_key in item:
                # A repeat of an already-seen source is still a match;
                # only the append is skipped, not the match itself.
                if label not in sources:
                    sources.append(label)
                matched = True
                break
        # If no match, add as "Other" (at most once)
        if not matched and item and 'Other' not in sources:
            sources.append('Other')
    return sources
def transform_csv_row_to_user_data(row: Dict[str, Any], existing_emails: Optional[set] = None) -> Dict[str, Any]:
    """
    Transform a CSV row to user data dictionary using Meta Name Reference mapping.

    Iterates over META_FIELD_MAPPING and dispatches each CSV column by its
    declared field type ('string', 'integer', 'boolean', 'date', 'datetime',
    'multi_value', 'custom'). A parse failure on one field is downgraded to
    a warning so a single bad value never drops the whole row; only a
    missing or duplicate email produces an error.

    Args:
        row: Dictionary of CSV column values
        existing_emails: Set of emails already in database (for duplicate check)
    Returns:
        Dictionary with:
        - user_data: Fields that map to User model
        - custom_data: Fields for custom_registration_data JSON
        - newsletter_prefs: Newsletter preference booleans
        - warnings: List of warning messages
        - errors: List of error messages
    """
    user_data = {}
    custom_data = {}
    newsletter_prefs = {}
    warnings = []
    errors = []
    # Process each mapped field
    for csv_field, (db_field, field_type, parser) in META_FIELD_MAPPING.items():
        value = row.get(csv_field)
        # Skip if no value (None, or pandas NaN from a blank CSV cell)
        if value is None or (isinstance(value, float) and pd.isna(value)):
            continue
        try:
            # Parse based on field type
            if field_type == 'string':
                if parser == 'lowercase':
                    # Emails are normalized to lowercase for matching
                    parsed_value = str(value).strip().lower()
                elif parser == 'phone':
                    parsed_value = standardize_phone(value)
                    # '0000000000' is standardize_phone's sentinel for
                    # an unusable number — presumably; verify in helper
                    if parsed_value == '0000000000':
                        warnings.append(f'Invalid phone: {value}')
                else:
                    parsed_value = str(value).strip() if value else None
            elif field_type == 'integer':
                # NOTE(review): int('123.0') raises and becomes a warning;
                # confirm exports never emit float-formatted IDs
                parsed_value = int(value) if value else None
            elif field_type == 'boolean':
                parsed_value = parse_boolean_yes_no(value)
            elif field_type == 'date':
                if parser == 'date_mmddyyyy':
                    # validate_dob returns (date, warning) — warning may be None
                    parsed_value, warning = validate_dob(value)
                    if warning:
                        warnings.append(warning)
                else:
                    parsed_value = parse_date_various(value)
            elif field_type == 'datetime':
                parsed_value = parse_datetime_mysql(value)
            elif field_type == 'multi_value':
                if parser == 'newsletter_checklist':
                    # Newsletter prefs go to their own dict, not user_data
                    newsletter_prefs = parse_newsletter_checklist(value)
                    continue  # Handled separately
                elif parser == 'volunteer_checklist':
                    parsed_value = parse_volunteer_checklist(value)
                elif parser == 'lead_sources':
                    parsed_value = parse_lead_sources(value)
                else:
                    parsed_value = [str(value)]
            elif field_type == 'custom':
                # Store in custom_registration_data: strip the JSON-path
                # prefix from the db_field name and bucket separately
                custom_field = db_field.replace('custom_registration_data.', '')
                custom_data[custom_field] = str(value).strip() if value else None
                continue
            else:
                # Unknown field type: pass the raw value through unchanged
                parsed_value = value
            # Store in appropriate location (None results are dropped)
            if parsed_value is not None:
                user_data[db_field] = parsed_value
        except Exception as e:
            # Field-level failures become warnings so the row still imports
            warnings.append(f'Error parsing {csv_field}: {str(e)}')
    # Check for required fields: email is hard-required, names are soft
    if not user_data.get('email'):
        errors.append('Missing email address')
    elif existing_emails and user_data['email'] in existing_emails:
        errors.append('Email already exists in database')
    if not user_data.get('first_name'):
        warnings.append('Missing first name')
    if not user_data.get('last_name'):
        warnings.append('Missing last name')
    return {
        'user_data': user_data,
        'custom_data': custom_data,
        'newsletter_prefs': newsletter_prefs,
        'warnings': warnings,
        'errors': errors
    }
# ============================================================================
# Members CSV Parser (Subscription Data)
# ============================================================================
def parse_members_csv(file_path: str) -> Dict[str, Any]:
    """
    Parse a WordPress PMS Members export CSV into per-user subscription data.

    Args:
        file_path: Path to pms-export-members CSV file
    Returns:
        Dictionary mapping lowercased user_email to a subscription data dict
    Raises:
        Exception: re-raised after logging if the CSV cannot be parsed
    """
    subscriptions = {}
    try:
        frame = pd.read_csv(file_path)
        for _, record in frame.iterrows():
            email = str(record.get('user_email', '')).strip().lower()
            # Skip rows with no usable email (blank cells read as 'nan')
            if not email or email == 'nan':
                continue
            # Subscription window
            start = parse_date_various(record.get('start_date'))
            expiration = parse_date_various(record.get('expiration_date'))
            # Collapse WordPress subscription statuses into our three states
            raw_status = str(record.get('status', '')).lower().strip()
            if raw_status == 'active':
                status = 'active'
            elif raw_status in ('expired', 'abandoned'):
                status = 'expired'
            elif raw_status in ('canceled', 'cancelled'):
                status = 'cancelled'
            else:
                status = 'active'  # Default
            # Normalize the payment gateway name
            gateway = str(record.get('payment_gateway', '')).lower().strip()
            if 'stripe' in gateway:
                method = 'stripe'
            elif 'paypal' in gateway:
                method = 'paypal'
            elif gateway in ('manual', 'admin', ''):
                method = 'manual'
            else:
                method = gateway or 'manual'
            subscriptions[email] = {
                'subscription_plan_id': record.get('subscription_plan_id'),
                'subscription_plan_name': record.get('subscription_plan_name'),
                'start_date': start,
                'end_date': expiration,
                'status': status,
                'payment_method': method,
                'wordpress_user_id': record.get('user_id'),
                'billing_first_name': record.get('billing_first_name'),
                'billing_last_name': record.get('billing_last_name'),
                'billing_address': record.get('billing_address'),
                'billing_city': record.get('billing_city'),
                'billing_state': record.get('billing_state'),
                'billing_zip': record.get('billing_zip'),
                'card_last4': record.get('billing_card_last4'),
            }
    except Exception as e:
        logger.error(f"Error parsing members CSV: {str(e)}")
        raise
    return subscriptions
# ============================================================================
# Payments CSV Parser (Payment History)
# ============================================================================
def parse_payments_csv(file_path: str) -> Dict[str, List[Dict]]:
    """
    Parse WordPress PMS Payments export CSV for payment history.

    Args:
        file_path: Path to pms-export-payments CSV file
    Returns:
        Dictionary mapping lowercased user_email to list of payment records
    Raises:
        Exception: re-raised after logging if the CSV cannot be parsed
    """
    payments_data = {}
    try:
        df = pd.read_csv(file_path)
        for _, row in df.iterrows():
            email = str(row.get('user_email', '')).strip().lower()
            if not email or email == 'nan':
                continue
            # Parse payment date
            payment_date = parse_date_various(row.get('date'))
            # Parse amount (convert to cents). round() before int() fixes a
            # float-truncation bug: float('19.99') * 100 == 1998.999...,
            # which bare int() truncated to 1998 cents.
            amount_str = str(row.get('amount', '0')).replace('$', '').replace(',', '').strip()
            try:
                amount_cents = int(round(float(amount_str) * 100))
            except (ValueError, TypeError):
                amount_cents = 0
            # Map WordPress payment status onto our status values
            wp_status = str(row.get('status', '')).lower().strip()
            if wp_status == 'completed':
                payment_status = 'completed'
            elif wp_status in ('pending', 'processing'):
                payment_status = 'pending'
            elif wp_status in ('failed', 'refunded'):
                payment_status = 'failed'
            else:
                payment_status = 'completed'  # Default for historical data
            payment_record = {
                'payment_id': row.get('payment_id'),
                'amount_cents': amount_cents,
                'status': payment_status,
                'date': payment_date,
                'payment_gateway': row.get('payment_gateway'),
                'transaction_id': row.get('transaction_id'),
                'profile_id': row.get('profile_id'),
                'subscription_plan_id': row.get('subscription_plan_id'),
                'wordpress_user_id': row.get('user_id'),
            }
            # Group each user's payments into a list keyed by email
            payments_data.setdefault(email, []).append(payment_record)
    except Exception as e:
        logger.error(f"Error parsing payments CSV: {str(e)}")
        raise
    return payments_data
# ============================================================================
# Comprehensive Import Analysis
# ============================================================================
def analyze_comprehensive_import(
    users_csv_path: str,
    members_csv_path: Optional[str] = None,
    payments_csv_path: Optional[str] = None,
    existing_emails: Optional[set] = None
) -> Dict[str, Any]:
    """
    Analyze all CSV files for comprehensive import with cross-referencing.

    Parses the optional members and payments exports first, then walks the
    users CSV row by row, joining subscription and payment data to each
    user by lowercased email. Errors in the optional files are recorded in
    the result and do not abort the analysis; a failure reading the users
    CSV is recorded and re-raised.

    Args:
        users_csv_path: Path to WordPress users export CSV (required)
        members_csv_path: Path to PMS members CSV (optional)
        payments_csv_path: Path to PMS payments CSV (optional)
        existing_emails: Set of emails already in database
    Returns:
        Comprehensive analysis with preview data for all files
    Raises:
        Exception: whatever pd.read_csv or per-row processing raises for
            the users CSV (after logging and recording the message)
    """
    if existing_emails is None:
        existing_emails = set()
    # Result skeleton: one section per input file plus cross-file summary
    result = {
        'users': {'total': 0, 'valid': 0, 'warnings': 0, 'errors': 0, 'preview': []},
        'members': {'total': 0, 'matched': 0, 'unmatched': 0, 'data': {}},
        'payments': {'total': 0, 'matched': 0, 'total_amount_cents': 0, 'data': {}},
        'summary': {
            'total_users': 0,
            'importable_users': 0,
            'duplicate_emails': 0,
            'users_with_subscriptions': 0,
            'users_with_payments': 0,
            'total_payment_amount': 0,
        }
    }
    # Parse members CSV if provided
    members_data = {}
    if members_csv_path:
        try:
            members_data = parse_members_csv(members_csv_path)
            result['members']['total'] = len(members_data)
            result['members']['data'] = members_data
        except Exception as e:
            # Optional file: record the error but keep analyzing the rest
            result['members']['error'] = str(e)
    # Parse payments CSV if provided
    payments_data = {}
    if payments_csv_path:
        try:
            payments_data = parse_payments_csv(payments_csv_path)
            # payments_data maps email -> list of records, hence the sums
            result['payments']['total'] = sum(len(p) for p in payments_data.values())
            result['payments']['data'] = payments_data
            result['payments']['total_amount_cents'] = sum(
                sum(p['amount_cents'] for p in payments)
                for payments in payments_data.values()
            )
        except Exception as e:
            result['payments']['error'] = str(e)
    # Parse users CSV
    try:
        df = pd.read_csv(users_csv_path)
        result['users']['total'] = len(df)
        seen_emails = set()
        total_warnings = 0
        total_errors = 0
        for idx, row in df.iterrows():
            row_dict = row.to_dict()
            transformed = transform_csv_row_to_user_data(row_dict, existing_emails)
            email = transformed['user_data'].get('email', '').lower()
            # Check for CSV duplicates (first occurrence wins; repeats error)
            if email in seen_emails:
                transformed['errors'].append(f'Duplicate email in CSV')
            elif email:
                seen_emails.add(email)
            # Cross-reference with members data
            subscription_data = members_data.get(email)
            if subscription_data:
                result['members']['matched'] += 1
            # Cross-reference with payments data
            payment_records = payments_data.get(email, [])
            if payment_records:
                result['payments']['matched'] += 1
            # Parse WordPress roles for role/status suggestion
            wp_capabilities = row.get('wp_capabilities', '')
            wp_roles = parse_php_serialized(wp_capabilities)
            loaf_role, role_status = map_wordpress_role(wp_roles)
            # Determine status: an explicit role-derived status wins over
            # the heuristic suggestion
            approval_status = str(row.get('wppb_approval_status', '')).strip()
            # 'pms_subscription_plan_63' appears to be a specific WP plan
            # role marking membership — TODO confirm the plan id is stable
            has_subscription = 'pms_subscription_plan_63' in wp_roles or subscription_data is not None
            if role_status:
                suggested_status = role_status
            else:
                suggested_status = suggest_status(approval_status, has_subscription, loaf_role)
            # Build preview row (row_number is 1-based for display)
            preview_row = {
                'row_number': idx + 1,
                'email': email,
                'first_name': transformed['user_data'].get('first_name', ''),
                'last_name': transformed['user_data'].get('last_name', ''),
                'phone': transformed['user_data'].get('phone', ''),
                'date_of_birth': transformed['user_data'].get('date_of_birth').isoformat() if transformed['user_data'].get('date_of_birth') else None,
                'wordpress_user_id': transformed['user_data'].get('wordpress_user_id'),
                'wordpress_roles': wp_roles,
                'suggested_role': loaf_role,
                'suggested_status': suggested_status,
                'has_subscription': has_subscription,
                'subscription_data': subscription_data,
                'payment_count': len(payment_records),
                'total_paid_cents': sum(p['amount_cents'] for p in payment_records),
                'user_data': transformed['user_data'],
                'custom_data': transformed['custom_data'],
                'newsletter_prefs': transformed['newsletter_prefs'],
                'warnings': transformed['warnings'],
                'errors': transformed['errors'],
            }
            result['users']['preview'].append(preview_row)
            total_warnings += len(transformed['warnings'])
            total_errors += len(transformed['errors'])
            # A row with zero errors counts as importable
            if not transformed['errors']:
                result['users']['valid'] += 1
        result['users']['warnings'] = total_warnings
        result['users']['errors'] = total_errors
        # Calculate unmatched members (member emails absent from users CSV)
        user_emails = {p['email'] for p in result['users']['preview'] if p['email']}
        result['members']['unmatched'] = len(set(members_data.keys()) - user_emails)
        # Summary stats
        result['summary']['total_users'] = result['users']['total']
        result['summary']['importable_users'] = result['users']['valid']
        result['summary']['duplicate_emails'] = len(seen_emails & existing_emails)
        result['summary']['users_with_subscriptions'] = result['members']['matched']
        result['summary']['users_with_payments'] = result['payments']['matched']
        result['summary']['total_payment_amount'] = result['payments']['total_amount_cents']
    except Exception as e:
        # Users CSV is required: record the failure and re-raise
        logger.error(f"Error analyzing users CSV: {str(e)}")
        result['users']['error'] = str(e)
        raise
    return result
# ============================================================================
# CSV Analysis and Preview Generation
# ============================================================================
@@ -1066,6 +344,8 @@ def analyze_csv(file_path: str, existing_emails: Optional[set] = None) -> Dict:
}
}
"""
import pandas as pd
# Read CSV with pandas
df = pd.read_csv(file_path)
@@ -1241,4 +521,11 @@ def format_preview_for_display(preview_data: List[Dict], page: int = 1, page_siz
# Module Initialization
# ============================================================================
# Import pandas for CSV processing
try:
import pandas as pd
except ImportError:
logger.error("pandas library not found. Please install: pip install pandas")
raise
logger.info("WordPress parser module loaded successfully")