Compare commits
44 Commits
38e5f5377a...loaf-prod
| Author | SHA1 | Date |
|---|---|---|
|  | a807d97345 |  |
|  | e7f6e9c20a |  |
|  | 0cd5350a7b |  |
|  | dd41cf773b |  |
|  | 1c262c4804 |  |
|  | a053075a30 |  |
|  | 6f8ec1d254 |  |
|  | 9754f2db6e |  |
|  | 03e5dd8bda |  |
|  | ab0f098f99 |  |
|  | ea87b3f6ee |  |
|  | 7d61eddcef |  |
|  | b29bb641f5 |  |
|  | d322d1334f |  |
|  | ece1e62913 |  |
|  | d3a0cabede |  |
|  | e938baa78e |  |
|  | a5fc42b353 |  |
|  | 39324ba6f6 |  |
|  | 37b1ab75df |  |
|  | adbfa7a3c8 |  |
|  | a74f161efa |  |
|  | d818d847bc |  |
|  | 1390e07500 |  |
|  | f915976cb3 |  |
|  | 9c5aafc57b |  |
|  | 3755a71ed8 |  |
|  | b2293a5588 |  |
|  | 9f29bf05d8 |  |
|  | b44d55919e |  |
|  | 1a6341a94c |  |
|  | 727cbf4b5c |  |
|  | 9c3f3c88b8 |  |
|  | 849a6a32af |  |
|  | 69b8185414 |  |
|  | f5f8ca8dc6 |  |
|  | 661a4cbb7c |  |
|  | a01a8b9915 |  |
|  | e126cb988c |  |
|  | fd988241a1 |  |
|  | c28eddca67 |  |
|  | e20542ccdc |  |
|  | b3f1f5f789 |  |
|  | 1da045f73f |  |
83  .dockerignore  Normal file
@@ -0,0 +1,83 @@
# Git
.git
.gitignore

# Python
__pycache__
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Virtual environments
venv/
ENV/
env/
.venv/

# IDE
.idea/
.vscode/
*.swp
*.swo
*~

# Testing
.pytest_cache/
.coverage
htmlcov/
.tox/
.nox/

# Environment files (will be mounted or passed via env vars)
.env
.env.local
.env.*.local
*.env

# Logs
*.log
logs/

# Database
*.db
*.sqlite3

# Alembic
alembic/versions/__pycache__/

# Docker
Dockerfile
docker-compose*.yml
.docker/

# Documentation
*.md
docs/

# Temporary files
tmp/
temp/
*.tmp

# OS files
.DS_Store
Thumbs.db

# Uploads (will be mounted as volume)
uploads/
13  .env.example
@@ -6,6 +6,10 @@ JWT_SECRET=your-secret-key-change-this-in-production
 JWT_ALGORITHM=HS256
 ACCESS_TOKEN_EXPIRE_MINUTES=30
 
+# Settings Encryption (for database-stored sensitive settings)
+# Generate with: python -c "import secrets; print(secrets.token_urlsafe(64))"
+SETTINGS_ENCRYPTION_KEY=your-encryption-key-generate-with-command-above
+
 # SMTP Email Configuration (Port 465 - SSL/TLS)
 SMTP_HOST=p.konceptkit.com
 SMTP_PORT=465
@@ -28,7 +32,14 @@ SMTP_FROM_NAME=LOAF Membership
 # Frontend URL
 FRONTEND_URL=http://localhost:3000
 
-# Stripe Configuration (for future payment integration)
+# Backend URL (for webhook URLs and API references)
+# Used to construct Stripe webhook URL shown in Admin Settings
+BACKEND_URL=http://localhost:8000
+
+# Stripe Configuration (NOW DATABASE-DRIVEN via Admin Settings page)
+# Configure Stripe credentials through the Admin Settings UI (requires SETTINGS_ENCRYPTION_KEY)
+# No longer requires .env variables - managed through database for dynamic updates
+# Legacy .env variables below are deprecated:
 # STRIPE_SECRET_KEY=sk_test_...
 # STRIPE_WEBHOOK_SECRET=whsec_...
 
6  .gitignore  vendored
@@ -1,5 +1,3 @@
-.env
-.venv
 # ============================================================================
 # Python Backend .gitignore
 # For FastAPI + PostgreSQL + Cloudflare R2 + Stripe
@@ -10,7 +8,6 @@
 .env.*
 !.env.example
 .envrc
-.sh
 
 # ===== Python =====
 # Byte-compiled / optimized / DLL files
@@ -248,6 +245,9 @@ temp_uploads/
 tmp/
 temporary/
 
+# Generated SQL files (from scripts)
+create_superadmin.sql
+
 # CSV imports
 imports/*.csv
 !imports/.gitkeep
42  Dockerfile
@@ -1,20 +1,40 @@
-# Use an official Python image (Linux)
-FROM python:3.12-slim
+# Backend Dockerfile - FastAPI with Python
+FROM python:3.11-slim
 
-# Set a working directory
+# Set environment variables
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+ENV PYTHONPATH=/app
+
+# Set work directory
 WORKDIR /app
 
-# Copy dependency list
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+    gcc \
+    libpq-dev \
+    curl \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Python dependencies
 COPY requirements.txt .
+RUN pip install --no-cache-dir --upgrade pip && \
+    pip install --no-cache-dir -r requirements.txt
 
-# Install dependencies
-RUN pip3 install --no-cache-dir -r requirements.txt
-
-# Copy the rest of the project
+# Copy application code
 COPY . .
 
-# Expose port (whatever your backend runs on)
+# Create non-root user for security
+RUN adduser --disabled-password --gecos '' appuser && \
+    chown -R appuser:appuser /app
+USER appuser
+
+# Expose port
 EXPOSE 8000
 
-# Run exactly your command
-CMD ["python", "-m", "uvicorn", "server:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:8000/health || exit 1
+
+# Run the application
+CMD ["uvicorn", "server:app", "--host", "0.0.0.0", "--port", "8000"]
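Note: the HEALTHCHECK above curls http://localhost:8000/health, so it assumes server.py (the "server:app" target) exposes a /health route. A minimal sketch of such an endpoint, for illustration only - the actual route is not part of this diff:

from fastapi import FastAPI

app = FastAPI()

@app.get("/health")
async def health():
    # Lightweight liveness probe for the Docker HEALTHCHECK (sketch; the real app defines many more routes)
    return {"status": "ok"}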
Binary file not shown. (x15 binary files in this comparison)
141  add_directory_permissions.py  Normal file
@@ -0,0 +1,141 @@
#!/usr/bin/env python3
"""
Add Directory Permissions Script

This script adds the new directory.view and directory.manage permissions
without clearing existing permissions.

Usage:
    python add_directory_permissions.py
"""

import os
import sys
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from database import Base
from models import Permission, RolePermission, Role, UserRole
from dotenv import load_dotenv

# Load environment variables
load_dotenv()

# Database connection
DATABASE_URL = os.getenv("DATABASE_URL")
if not DATABASE_URL:
    print("Error: DATABASE_URL environment variable not set")
    sys.exit(1)

engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# New directory permissions
NEW_PERMISSIONS = [
    {"code": "directory.view", "name": "View Directory Settings", "description": "View member directory field configuration", "module": "directory"},
    {"code": "directory.manage", "name": "Manage Directory Fields", "description": "Enable/disable directory fields shown in Profile and Directory pages", "module": "directory"},
]

# Roles that should have these permissions
ROLE_PERMISSION_MAP = {
    "directory.view": ["admin", "superadmin"],
    "directory.manage": ["admin", "superadmin"],
}


def add_directory_permissions():
    """Add directory permissions and assign to appropriate roles"""
    db = SessionLocal()

    try:
        print("=" * 60)
        print("Adding Directory Permissions")
        print("=" * 60)

        # Step 1: Add permissions if they don't exist
        print("\n1. Adding permissions...")
        permission_map = {}

        for perm_data in NEW_PERMISSIONS:
            existing = db.query(Permission).filter(Permission.code == perm_data["code"]).first()
            if existing:
                print(f"  - {perm_data['code']}: Already exists")
                permission_map[perm_data["code"]] = existing
            else:
                permission = Permission(
                    code=perm_data["code"],
                    name=perm_data["name"],
                    description=perm_data["description"],
                    module=perm_data["module"]
                )
                db.add(permission)
                db.flush()  # Get the ID
                permission_map[perm_data["code"]] = permission
                print(f"  - {perm_data['code']}: Created")

        db.commit()

        # Step 2: Get roles
        print("\n2. Fetching roles...")
        roles = db.query(Role).all()
        role_map = {role.code: role for role in roles}
        print(f"   Found {len(roles)} roles: {', '.join(role_map.keys())}")

        # Enum mapping for backward compatibility
        role_enum_map = {
            'guest': UserRole.guest,
            'member': UserRole.member,
            'admin': UserRole.admin,
            'superadmin': UserRole.superadmin,
            'finance': UserRole.finance
        }

        # Step 3: Assign permissions to roles
        print("\n3. Assigning permissions to roles...")
        for perm_code, role_codes in ROLE_PERMISSION_MAP.items():
            permission = permission_map.get(perm_code)
            if not permission:
                print(f"   Warning: Permission {perm_code} not found")
                continue

            for role_code in role_codes:
                role = role_map.get(role_code)
                if not role:
                    print(f"   Warning: Role {role_code} not found")
                    continue

                # Check if mapping already exists
                existing_mapping = db.query(RolePermission).filter(
                    RolePermission.role_id == role.id,
                    RolePermission.permission_id == permission.id
                ).first()

                if existing_mapping:
                    print(f"  - {role_code} -> {perm_code}: Already assigned")
                else:
                    role_enum = role_enum_map.get(role_code, UserRole.guest)
                    mapping = RolePermission(
                        role=role_enum,
                        role_id=role.id,
                        permission_id=permission.id
                    )
                    db.add(mapping)
                    print(f"  - {role_code} -> {perm_code}: Assigned")

        db.commit()

        print("\n" + "=" * 60)
        print("Directory permissions added successfully!")
        print("=" * 60)

    except Exception as e:
        db.rollback()
        print(f"\nError: {str(e)}")
        import traceback
        traceback.print_exc()
        raise
    finally:
        db.close()


if __name__ == "__main__":
    add_directory_permissions()
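A quick way to confirm the script ran is to query the same Permission table it writes to. This spot check is a sketch only; it reuses SessionLocal and Permission exactly as imported in the script above:

from database import SessionLocal
from models import Permission

with SessionLocal() as db:
    codes = [
        p.code
        for p in db.query(Permission).filter(Permission.module == "directory").all()
    ]
    # Expected to contain 'directory.view' and 'directory.manage' after a successful run
    print(codes)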
141  add_registration_permissions.py  Normal file
@@ -0,0 +1,141 @@
#!/usr/bin/env python3
"""
Add Registration Permissions Script

This script adds the new registration.view and registration.manage permissions
without clearing existing permissions.

Usage:
    python add_registration_permissions.py
"""

import os
import sys
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from database import Base
from models import Permission, RolePermission, Role, UserRole
from dotenv import load_dotenv

# Load environment variables
load_dotenv()

# Database connection
DATABASE_URL = os.getenv("DATABASE_URL")
if not DATABASE_URL:
    print("Error: DATABASE_URL environment variable not set")
    sys.exit(1)

engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# New registration permissions
NEW_PERMISSIONS = [
    {"code": "registration.view", "name": "View Registration Settings", "description": "View registration form schema and settings", "module": "registration"},
    {"code": "registration.manage", "name": "Manage Registration Form", "description": "Edit registration form schema, steps, and fields", "module": "registration"},
]

# Roles that should have these permissions
ROLE_PERMISSION_MAP = {
    "registration.view": ["admin", "superadmin"],
    "registration.manage": ["admin", "superadmin"],
}


def add_registration_permissions():
    """Add registration permissions and assign to appropriate roles"""
    db = SessionLocal()

    try:
        print("=" * 60)
        print("Adding Registration Permissions")
        print("=" * 60)

        # Step 1: Add permissions if they don't exist
        print("\n1. Adding permissions...")
        permission_map = {}

        for perm_data in NEW_PERMISSIONS:
            existing = db.query(Permission).filter(Permission.code == perm_data["code"]).first()
            if existing:
                print(f"  - {perm_data['code']}: Already exists")
                permission_map[perm_data["code"]] = existing
            else:
                permission = Permission(
                    code=perm_data["code"],
                    name=perm_data["name"],
                    description=perm_data["description"],
                    module=perm_data["module"]
                )
                db.add(permission)
                db.flush()  # Get the ID
                permission_map[perm_data["code"]] = permission
                print(f"  - {perm_data['code']}: Created")

        db.commit()

        # Step 2: Get roles
        print("\n2. Fetching roles...")
        roles = db.query(Role).all()
        role_map = {role.code: role for role in roles}
        print(f"   Found {len(roles)} roles: {', '.join(role_map.keys())}")

        # Enum mapping for backward compatibility
        role_enum_map = {
            'guest': UserRole.guest,
            'member': UserRole.member,
            'admin': UserRole.admin,
            'superadmin': UserRole.superadmin,
            'finance': UserRole.finance
        }

        # Step 3: Assign permissions to roles
        print("\n3. Assigning permissions to roles...")
        for perm_code, role_codes in ROLE_PERMISSION_MAP.items():
            permission = permission_map.get(perm_code)
            if not permission:
                print(f"   Warning: Permission {perm_code} not found")
                continue

            for role_code in role_codes:
                role = role_map.get(role_code)
                if not role:
                    print(f"   Warning: Role {role_code} not found")
                    continue

                # Check if mapping already exists
                existing_mapping = db.query(RolePermission).filter(
                    RolePermission.role_id == role.id,
                    RolePermission.permission_id == permission.id
                ).first()

                if existing_mapping:
                    print(f"  - {role_code} -> {perm_code}: Already assigned")
                else:
                    role_enum = role_enum_map.get(role_code, UserRole.guest)
                    mapping = RolePermission(
                        role=role_enum,
                        role_id=role.id,
                        permission_id=permission.id
                    )
                    db.add(mapping)
                    print(f"  - {role_code} -> {perm_code}: Assigned")

        db.commit()

        print("\n" + "=" * 60)
        print("Registration permissions added successfully!")
        print("=" * 60)

    except Exception as e:
        db.rollback()
        print(f"\nError: {str(e)}")
        import traceback
        traceback.print_exc()
        raise
    finally:
        db.close()


if __name__ == "__main__":
    add_registration_permissions()
39  alembic/versions/014_add_custom_registration_data.py  Normal file
@@ -0,0 +1,39 @@
"""add_custom_registration_data

Revision ID: 014_custom_registration
Revises: a1b2c3d4e5f6
Create Date: 2026-02-01 10:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '014_custom_registration'
down_revision: Union[str, None] = 'a1b2c3d4e5f6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Add custom_registration_data column to users table
    # This stores dynamic registration field responses as JSON
    op.add_column('users', sa.Column(
        'custom_registration_data',
        sa.JSON,
        nullable=False,
        server_default='{}'
    ))

    # Add comment for documentation
    op.execute("""
        COMMENT ON COLUMN users.custom_registration_data IS
        'Dynamic registration field responses stored as JSON for custom form fields';
    """)


def downgrade() -> None:
    op.drop_column('users', 'custom_registration_data')
48  alembic/versions/4fa11836f7fd_add_role_audit_fields.py  Normal file
@@ -0,0 +1,48 @@
"""add_role_audit_fields

Revision ID: 4fa11836f7fd
Revises: 013_sync_permissions
Create Date: 2026-01-16 17:21:40.514605

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID


# revision identifiers, used by Alembic.
revision: str = '4fa11836f7fd'
down_revision: Union[str, None] = '013_sync_permissions'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Add role audit trail columns
    op.add_column('users', sa.Column('role_changed_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('users', sa.Column('role_changed_by', UUID(as_uuid=True), nullable=True))

    # Create foreign key constraint to track who changed the role
    op.create_foreign_key(
        'fk_users_role_changed_by',
        'users', 'users',
        ['role_changed_by'], ['id'],
        ondelete='SET NULL'
    )

    # Create index for efficient querying by role change date
    op.create_index('idx_users_role_changed_at', 'users', ['role_changed_at'])


def downgrade() -> None:
    # Drop index first
    op.drop_index('idx_users_role_changed_at')

    # Drop foreign key constraint
    op.drop_constraint('fk_users_role_changed_by', 'users', type_='foreignkey')

    # Drop columns
    op.drop_column('users', 'role_changed_by')
    op.drop_column('users', 'role_changed_at')
@@ -0,0 +1,76 @@
"""add_stripe_transaction_metadata

Revision ID: 956ea1628264
Revises: ec4cb4a49cde
Create Date: 2026-01-20 22:00:01.806931

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '956ea1628264'
down_revision: Union[str, None] = 'ec4cb4a49cde'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Add Stripe transaction metadata to subscriptions table
    op.add_column('subscriptions', sa.Column('stripe_payment_intent_id', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('stripe_charge_id', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('stripe_invoice_id', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('payment_completed_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('subscriptions', sa.Column('card_last4', sa.String(4), nullable=True))
    op.add_column('subscriptions', sa.Column('card_brand', sa.String(20), nullable=True))
    op.add_column('subscriptions', sa.Column('stripe_receipt_url', sa.String(), nullable=True))

    # Add indexes for Stripe transaction IDs in subscriptions
    op.create_index('idx_subscriptions_payment_intent', 'subscriptions', ['stripe_payment_intent_id'])
    op.create_index('idx_subscriptions_charge_id', 'subscriptions', ['stripe_charge_id'])
    op.create_index('idx_subscriptions_invoice_id', 'subscriptions', ['stripe_invoice_id'])

    # Add Stripe transaction metadata to donations table
    op.add_column('donations', sa.Column('stripe_charge_id', sa.String(), nullable=True))
    op.add_column('donations', sa.Column('stripe_customer_id', sa.String(), nullable=True))
    op.add_column('donations', sa.Column('payment_completed_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('donations', sa.Column('card_last4', sa.String(4), nullable=True))
    op.add_column('donations', sa.Column('card_brand', sa.String(20), nullable=True))
    op.add_column('donations', sa.Column('stripe_receipt_url', sa.String(), nullable=True))

    # Add indexes for Stripe transaction IDs in donations
    op.create_index('idx_donations_payment_intent', 'donations', ['stripe_payment_intent_id'])
    op.create_index('idx_donations_charge_id', 'donations', ['stripe_charge_id'])
    op.create_index('idx_donations_customer_id', 'donations', ['stripe_customer_id'])


def downgrade() -> None:
    # Remove indexes from donations
    op.drop_index('idx_donations_customer_id', table_name='donations')
    op.drop_index('idx_donations_charge_id', table_name='donations')
    op.drop_index('idx_donations_payment_intent', table_name='donations')

    # Remove columns from donations
    op.drop_column('donations', 'stripe_receipt_url')
    op.drop_column('donations', 'card_brand')
    op.drop_column('donations', 'card_last4')
    op.drop_column('donations', 'payment_completed_at')
    op.drop_column('donations', 'stripe_customer_id')
    op.drop_column('donations', 'stripe_charge_id')

    # Remove indexes from subscriptions
    op.drop_index('idx_subscriptions_invoice_id', table_name='subscriptions')
    op.drop_index('idx_subscriptions_charge_id', table_name='subscriptions')
    op.drop_index('idx_subscriptions_payment_intent', table_name='subscriptions')

    # Remove columns from subscriptions
    op.drop_column('subscriptions', 'stripe_receipt_url')
    op.drop_column('subscriptions', 'card_brand')
    op.drop_column('subscriptions', 'card_last4')
    op.drop_column('subscriptions', 'payment_completed_at')
    op.drop_column('subscriptions', 'stripe_invoice_id')
    op.drop_column('subscriptions', 'stripe_charge_id')
    op.drop_column('subscriptions', 'stripe_payment_intent_id')
100  alembic/versions/add_payment_methods.py  Normal file
@@ -0,0 +1,100 @@
"""add_payment_methods

Revision ID: a1b2c3d4e5f6
Revises: 956ea1628264
Create Date: 2026-01-30 10:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'a1b2c3d4e5f6'
down_revision: Union[str, None] = '956ea1628264'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    conn = op.get_bind()

    # Create PaymentMethodType enum
    paymentmethodtype = postgresql.ENUM(
        'card', 'cash', 'bank_transfer', 'check',
        name='paymentmethodtype',
        create_type=False
    )
    paymentmethodtype.create(conn, checkfirst=True)

    # Check if stripe_customer_id column exists on users table
    result = conn.execute(sa.text("""
        SELECT column_name FROM information_schema.columns
        WHERE table_name = 'users' AND column_name = 'stripe_customer_id'
    """))
    if result.fetchone() is None:
        # Add stripe_customer_id to users table
        op.add_column('users', sa.Column(
            'stripe_customer_id',
            sa.String(),
            nullable=True,
            comment='Stripe Customer ID for payment method management'
        ))
        op.create_index('ix_users_stripe_customer_id', 'users', ['stripe_customer_id'])

    # Check if payment_methods table exists
    result = conn.execute(sa.text("""
        SELECT table_name FROM information_schema.tables
        WHERE table_name = 'payment_methods'
    """))
    if result.fetchone() is None:
        # Create payment_methods table
        op.create_table(
            'payment_methods',
            sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
            sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
            sa.Column('stripe_payment_method_id', sa.String(), nullable=True, unique=True, comment='Stripe pm_xxx reference'),
            sa.Column('card_brand', sa.String(20), nullable=True, comment='Card brand: visa, mastercard, amex, etc.'),
            sa.Column('card_last4', sa.String(4), nullable=True, comment='Last 4 digits of card'),
            sa.Column('card_exp_month', sa.Integer(), nullable=True, comment='Card expiration month'),
            sa.Column('card_exp_year', sa.Integer(), nullable=True, comment='Card expiration year'),
            sa.Column('card_funding', sa.String(20), nullable=True, comment='Card funding type: credit, debit, prepaid'),
            sa.Column('payment_type', paymentmethodtype, nullable=False, server_default='card'),
            sa.Column('is_default', sa.Boolean(), nullable=False, server_default='false', comment='Whether this is the default payment method for auto-renewals'),
            sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true', comment='Soft delete flag - False means removed'),
            sa.Column('is_manual', sa.Boolean(), nullable=False, server_default='false', comment='True for manually recorded methods (cash/check)'),
            sa.Column('manual_notes', sa.Text(), nullable=True, comment='Admin notes for manual payment methods'),
            sa.Column('created_by', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='SET NULL'), nullable=True, comment='Admin who added this on behalf of user'),
            sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
            sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now(), onupdate=sa.func.now()),
        )

        # Create indexes
        op.create_index('ix_payment_methods_user_id', 'payment_methods', ['user_id'])
        op.create_index('ix_payment_methods_stripe_pm_id', 'payment_methods', ['stripe_payment_method_id'])
        op.create_index('idx_payment_method_user_default', 'payment_methods', ['user_id', 'is_default'])
        op.create_index('idx_payment_method_active', 'payment_methods', ['user_id', 'is_active'])


def downgrade() -> None:
    # Drop indexes
    op.drop_index('idx_payment_method_active', table_name='payment_methods')
    op.drop_index('idx_payment_method_user_default', table_name='payment_methods')
    op.drop_index('ix_payment_methods_stripe_pm_id', table_name='payment_methods')
    op.drop_index('ix_payment_methods_user_id', table_name='payment_methods')

    # Drop payment_methods table
    op.drop_table('payment_methods')

    # Drop stripe_customer_id from users
    op.drop_index('ix_users_stripe_customer_id', table_name='users')
    op.drop_column('users', 'stripe_customer_id')

    # Drop PaymentMethodType enum
    paymentmethodtype = postgresql.ENUM(
        'card', 'cash', 'bank_transfer', 'check',
        name='paymentmethodtype'
    )
    paymentmethodtype.drop(op.get_bind(), checkfirst=True)
68  alembic/versions/ec4cb4a49cde_add_system_settings_table.py  Normal file
@@ -0,0 +1,68 @@
"""add_system_settings_table

Revision ID: ec4cb4a49cde
Revises: 4fa11836f7fd
Create Date: 2026-01-16 18:16:00.283455

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID


# revision identifiers, used by Alembic.
revision: str = 'ec4cb4a49cde'
down_revision: Union[str, None] = '4fa11836f7fd'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Create enum for setting types (only if not exists)
    op.execute("""
        DO $$ BEGIN
            CREATE TYPE settingtype AS ENUM ('plaintext', 'encrypted', 'json');
        EXCEPTION
            WHEN duplicate_object THEN null;
        END $$;
    """)

    # Create system_settings table
    op.execute("""
        CREATE TABLE system_settings (
            id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
            setting_key VARCHAR(100) UNIQUE NOT NULL,
            setting_value TEXT,
            setting_type settingtype NOT NULL DEFAULT 'plaintext'::settingtype,
            description TEXT,
            updated_by UUID REFERENCES users(id) ON DELETE SET NULL,
            created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
            is_sensitive BOOLEAN NOT NULL DEFAULT FALSE
        );

        COMMENT ON COLUMN system_settings.setting_key IS 'Unique setting identifier (e.g., stripe_secret_key)';
        COMMENT ON COLUMN system_settings.setting_value IS 'Setting value (encrypted if setting_type is encrypted)';
        COMMENT ON COLUMN system_settings.setting_type IS 'Type of setting: plaintext, encrypted, or json';
        COMMENT ON COLUMN system_settings.description IS 'Human-readable description of the setting';
        COMMENT ON COLUMN system_settings.updated_by IS 'User who last updated this setting';
        COMMENT ON COLUMN system_settings.is_sensitive IS 'Whether this setting contains sensitive data';
    """)

    # Create indexes
    op.create_index('idx_system_settings_key', 'system_settings', ['setting_key'])
    op.create_index('idx_system_settings_updated_at', 'system_settings', ['updated_at'])


def downgrade() -> None:
    # Drop indexes
    op.drop_index('idx_system_settings_updated_at')
    op.drop_index('idx_system_settings_key')

    # Drop table
    op.drop_table('system_settings')

    # Drop enum
    op.execute('DROP TYPE IF EXISTS settingtype')
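Taken together, the down_revision values in the new migrations imply the chain 013_sync_permissions -> 4fa11836f7fd -> ec4cb4a49cde -> 956ea1628264 -> a1b2c3d4e5f6 -> 014_custom_registration. A sketch of applying them programmatically with Alembic's command API, assuming an alembic.ini at the project root (the config file itself is not shown in this diff):

from alembic import command
from alembic.config import Config

# Apply every pending revision up to 014_custom_registration (the current head)
alembic_cfg = Config("alembic.ini")  # path is an assumption
command.upgrade(alembic_cfg, "head")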
4  auth.py
@@ -128,7 +128,7 @@ async def get_current_admin_user(current_user: User = Depends(get_current_user))
     return current_user
 
 async def get_active_member(current_user: User = Depends(get_current_user)) -> User:
-    """Require user to be active member with valid payment"""
+    """Require user to be active member or staff with valid status"""
     from models import UserStatus
 
     if current_user.status != UserStatus.active:
@@ -138,7 +138,7 @@ async def get_active_member(current_user: User = Depends(get_current_user)) -> U
         )
 
     role_code = get_user_role_code(current_user)
-    if role_code not in ["member", "admin", "superadmin"]:
+    if role_code not in ["member", "admin", "superadmin", "finance"]:
         raise HTTPException(
             status_code=status.HTTP_403_FORBIDDEN,
             detail="Member access only"
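For context, get_active_member is a FastAPI dependency; with this change, active finance users pass the role check as well. An illustrative route using it - the path and handler are hypothetical, not part of this diff:

from fastapi import APIRouter, Depends
from auth import get_active_member
from models import User

router = APIRouter()

@router.get("/members/me")  # hypothetical path, for illustration only
async def read_own_profile(current_user: User = Depends(get_active_member)):
    # Reachable by active members, admins, superadmins, and now finance users
    return {"email": current_user.email, "status": current_user.status}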
@@ -1,38 +1,15 @@
 #!/usr/bin/env python3
 """
 Create Superadmin User Script
-Generates a superadmin user with hashed password for LOAF membership platform
+Directly creates a superadmin user in the database for LOAF membership platform
 """
 
-import bcrypt
 import sys
 import os
 from getpass import getpass
 
-def generate_password_hash(password: str) -> str:
-    """Generate bcrypt hash for password"""
-    return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode()
-
-def generate_sql(email: str, password_hash: str, first_name: str, last_name: str) -> str:
-    """Generate SQL INSERT statement"""
-    return f"""
--- Create Superadmin User
-INSERT INTO users (
-    id, email, password_hash, first_name, last_name,
-    status, role, email_verified, created_at, updated_at
-) VALUES (
-    gen_random_uuid(),
-    '{email}',
-    '{password_hash}',
-    '{first_name}',
-    '{last_name}',
-    'active',
-    'superadmin',
-    true,
-    NOW(),
-    NOW()
-);
-"""
+# Add the backend directory to path for imports
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
 
 def main():
     print("=" * 70)
@@ -40,6 +17,15 @@ def main():
     print("=" * 70)
     print()
 
+    # Check for DATABASE_URL
+    from dotenv import load_dotenv
+    load_dotenv()
+
+    database_url = os.getenv("DATABASE_URL")
+    if not database_url:
+        print("❌ DATABASE_URL not found in environment or .env file")
+        sys.exit(1)
+
     # Get user input
     email = input("Email address: ").strip()
     if not email or '@' not in email:
@@ -68,31 +54,89 @@ def main():
     sys.exit(1)
 
     print()
-    print("Generating password hash...")
-    password_hash = generate_password_hash(password)
-
-    print("✅ Password hash generated")
-    print()
-    print("=" * 70)
-    print("SQL STATEMENT")
-    print("=" * 70)
-
-    sql = generate_sql(email, password_hash, first_name, last_name)
-    print(sql)
-
-    # Save to file
-    output_file = "create_superadmin.sql"
-    with open(output_file, 'w') as f:
-        f.write(sql)
-
-    print("=" * 70)
-    print(f"✅ SQL saved to: {output_file}")
-    print()
-    print("Run this command to create the user:")
-    print(f"  psql -U postgres -d loaf_new -f {output_file}")
-    print()
-    print("Or copy the SQL above and run it directly in psql")
-    print("=" * 70)
+    print("Creating superadmin user...")
+
+    try:
+        # Import database dependencies
+        from sqlalchemy import create_engine, text
+        from passlib.context import CryptContext
+
+        # Create password hash
+        pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+        password_hash = pwd_context.hash(password)
+
+        # Connect to database
+        engine = create_engine(database_url)
+
+        with engine.connect() as conn:
+            # Check if user already exists
+            result = conn.execute(
+                text("SELECT id FROM users WHERE email = :email"),
+                {"email": email}
+            )
+            if result.fetchone():
+                print(f"❌ User with email '{email}' already exists")
+                sys.exit(1)
+
+            # Insert superadmin user
+            conn.execute(
+                text("""
+                    INSERT INTO users (
+                        id, email, password_hash, first_name, last_name,
+                        phone, address, city, state, zipcode, date_of_birth,
+                        status, role, email_verified,
+                        newsletter_subscribed, accepts_tos,
+                        created_at, updated_at
+                    ) VALUES (
+                        gen_random_uuid(),
+                        :email,
+                        :password_hash,
+                        :first_name,
+                        :last_name,
+                        '',
+                        '',
+                        '',
+                        '',
+                        '',
+                        '1990-01-01',
+                        'active',
+                        'superadmin',
+                        true,
+                        false,
+                        true,
+                        NOW(),
+                        NOW()
+                    )
+                """),
+                {
+                    "email": email,
+                    "password_hash": password_hash,
+                    "first_name": first_name,
+                    "last_name": last_name
+                }
+            )
+            conn.commit()
+
+            print()
+            print("=" * 70)
+            print("✅ Superadmin user created successfully!")
+            print("=" * 70)
+            print()
+            print(f"  Email: {email}")
+            print(f"  Name: {first_name} {last_name}")
+            print(f"  Role: superadmin")
+            print(f"  Status: active")
+            print()
+            print("You can now log in with these credentials.")
+            print("=" * 70)
+
+    except ImportError as e:
+        print(f"❌ Missing dependency: {e}")
+        print("   Run: pip install sqlalchemy psycopg2-binary passlib python-dotenv")
+        sys.exit(1)
+    except Exception as e:
+        print(f"❌ Database error: {e}")
+        sys.exit(1)
 
 if __name__ == "__main__":
     try:
@@ -100,6 +144,3 @@ if __name__ == "__main__":
     except KeyboardInterrupt:
         print("\n\n❌ Cancelled by user")
         sys.exit(1)
-    except Exception as e:
-        print(f"\n❌ Error: {e}")
-        sys.exit(1)
17  database.py
@@ -1,6 +1,7 @@
 from sqlalchemy import create_engine
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import sessionmaker
+from sqlalchemy.pool import QueuePool
 import os
 from dotenv import load_dotenv
 from pathlib import Path
@@ -10,7 +11,21 @@ load_dotenv(ROOT_DIR / '.env')
 DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://user:password@localhost:5432/membership_db')
 
-engine = create_engine(DATABASE_URL)
+# Configure engine with connection pooling and connection health checks
+engine = create_engine(
+    DATABASE_URL,
+    poolclass=QueuePool,
+    pool_size=5,          # Keep 5 connections open
+    max_overflow=10,      # Allow up to 10 extra connections during peak
+    pool_pre_ping=True,   # CRITICAL: Test connections before using them
+    pool_recycle=3600,    # Recycle connections every hour (prevents stale connections)
+    echo=False,           # Set to True for SQL debugging
+    connect_args={
+        'connect_timeout': 10,  # Timeout connection attempts after 10 seconds
+        'options': '-c statement_timeout=30000'  # 30 second query timeout
+    }
+)
 
 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
 
 Base = declarative_base()
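The pool settings above matter most when a session is opened per request. A typical FastAPI session dependency built on this SessionLocal - a sketch only, since the project's actual get_db implementation is not part of this diff:

from database import SessionLocal

def get_db():
    db = SessionLocal()
    try:
        yield db    # handlers get a connection that pool_pre_ping has already validated
    finally:
        db.close()  # returns the connection to the QueuePool instead of discarding it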
@@ -1,14 +0,0 @@
-services:
-  backend:
-    build:
-      context: .
-      dockerfile: Dockerfile  # Use Dockerfile.prod for production
-    ports:
-      - "8000:8000"
-    env_file:
-      - .env
-    environment:
-      DATABASE_URL: ${DATABASE_URL}
-    volumes:
-      - .:/app  # sync code for hot reload
-
122  encryption_service.py  Normal file
@@ -0,0 +1,122 @@
"""
Encryption service for sensitive settings stored in database.

Uses Fernet symmetric encryption (AES-128 in CBC mode with HMAC authentication).
The encryption key is derived from a master secret stored in .env.
"""

import os
import base64
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from cryptography.hazmat.backends import default_backend


class EncryptionService:
    """Service for encrypting and decrypting sensitive configuration values"""

    def __init__(self):
        # Get master encryption key from environment
        # This should be a long, random string (e.g., 64 characters)
        # Generate one with: python -c "import secrets; print(secrets.token_urlsafe(64))"
        self.master_secret = os.environ.get('SETTINGS_ENCRYPTION_KEY')

        if not self.master_secret:
            raise ValueError(
                "SETTINGS_ENCRYPTION_KEY environment variable not set. "
                "Generate one with: python -c \"import secrets; print(secrets.token_urlsafe(64))\""
            )

        # Derive encryption key from master secret using PBKDF2HMAC
        # This adds an extra layer of security
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=b'systemsettings',  # Fixed salt (OK for key derivation from strong secret)
            iterations=100000,
            backend=default_backend()
        )
        key = base64.urlsafe_b64encode(kdf.derive(self.master_secret.encode()))
        self.cipher = Fernet(key)

    def encrypt(self, plaintext: str) -> str:
        """
        Encrypt a plaintext string.

        Args:
            plaintext: The string to encrypt

        Returns:
            Base64-encoded encrypted string
        """
        if not plaintext:
            return ""

        encrypted_bytes = self.cipher.encrypt(plaintext.encode())
        return encrypted_bytes.decode('utf-8')

    def decrypt(self, encrypted: str) -> str:
        """
        Decrypt an encrypted string.

        Args:
            encrypted: The base64-encoded encrypted string

        Returns:
            Decrypted plaintext string

        Raises:
            cryptography.fernet.InvalidToken: If decryption fails (wrong key or corrupted data)
        """
        if not encrypted:
            return ""

        decrypted_bytes = self.cipher.decrypt(encrypted.encode())
        return decrypted_bytes.decode('utf-8')

    def is_encrypted(self, value: str) -> bool:
        """
        Check if a value appears to be encrypted (starts with Fernet token format).

        This is a heuristic check - not 100% reliable but useful for validation.

        Args:
            value: String to check

        Returns:
            True if value looks like a Fernet token
        """
        if not value:
            return False

        # Fernet tokens are base64-encoded and start with version byte (gAAAAA...)
        # They're always > 60 characters
        try:
            return len(value) > 60 and value.startswith('gAAAAA')
        except:
            return False


# Global encryption service instance
# Initialize on module import so it fails fast if encryption key is missing
try:
    encryption_service = EncryptionService()
except ValueError as e:
    print(f"WARNING: {e}")
    print("Encryption service will not be available.")
    encryption_service = None


def get_encryption_service() -> EncryptionService:
    """
    Get the global encryption service instance.

    Raises:
        ValueError: If encryption service is not initialized (missing SETTINGS_ENCRYPTION_KEY)
    """
    if encryption_service is None:
        raise ValueError(
            "Encryption service not initialized. Set SETTINGS_ENCRYPTION_KEY environment variable."
        )
    return encryption_service
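A round-trip sketch of the service above, assuming SETTINGS_ENCRYPTION_KEY is set in the environment before import (the secret value here is a placeholder):

from encryption_service import get_encryption_service

svc = get_encryption_service()
token = svc.encrypt("sk_test_example")   # Fernet token, starts with "gAAAAA"
assert svc.is_encrypted(token)
assert svc.decrypt(token) == "sk_test_example"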
@@ -94,6 +94,30 @@ BEGIN;
|
|||||||
-- SECTION 2: Create Core Tables
|
-- SECTION 2: Create Core Tables
|
||||||
-- ============================================================================
|
-- ============================================================================
|
||||||
|
|
||||||
|
-- Import Jobs table (must be created before users due to FK reference)
|
||||||
|
CREATE TABLE IF NOT EXISTS import_jobs (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
|
||||||
|
filename VARCHAR NOT NULL,
|
||||||
|
status importjobstatus NOT NULL DEFAULT 'processing',
|
||||||
|
total_rows INTEGER DEFAULT 0,
|
||||||
|
processed_rows INTEGER DEFAULT 0,
|
||||||
|
success_count INTEGER DEFAULT 0,
|
||||||
|
error_count INTEGER DEFAULT 0,
|
||||||
|
error_log JSONB DEFAULT '[]'::jsonb,
|
||||||
|
|
||||||
|
-- WordPress import enhancements
|
||||||
|
field_mapping JSONB DEFAULT '{}'::jsonb,
|
||||||
|
wordpress_metadata JSONB DEFAULT '{}'::jsonb,
|
||||||
|
imported_user_ids JSONB DEFAULT '[]'::jsonb,
|
||||||
|
rollback_at TIMESTAMP WITH TIME ZONE,
|
||||||
|
rollback_by UUID, -- Will be updated with FK after users table exists
|
||||||
|
|
||||||
|
started_by UUID, -- Will be updated with FK after users table exists
|
||||||
|
started_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
completed_at TIMESTAMP WITH TIME ZONE
|
||||||
|
);
|
||||||
|
|
||||||
-- Users table
|
-- Users table
|
||||||
CREATE TABLE IF NOT EXISTS users (
|
CREATE TABLE IF NOT EXISTS users (
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
@@ -103,6 +127,7 @@ CREATE TABLE IF NOT EXISTS users (
|
|||||||
password_hash VARCHAR NOT NULL,
|
password_hash VARCHAR NOT NULL,
|
||||||
email_verified BOOLEAN NOT NULL DEFAULT FALSE,
|
email_verified BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
email_verification_token VARCHAR UNIQUE,
|
email_verification_token VARCHAR UNIQUE,
|
||||||
|
email_verification_expires TIMESTAMP WITH TIME ZONE,
|
||||||
|
|
||||||
-- Personal Information
|
-- Personal Information
|
||||||
first_name VARCHAR NOT NULL,
|
first_name VARCHAR NOT NULL,
|
||||||
@@ -113,7 +138,6 @@ CREATE TABLE IF NOT EXISTS users (
|
|||||||
state VARCHAR(2),
|
state VARCHAR(2),
|
||||||
zipcode VARCHAR(10),
|
zipcode VARCHAR(10),
|
||||||
date_of_birth DATE,
|
date_of_birth DATE,
|
||||||
bio TEXT,
|
|
||||||
|
|
||||||
-- Profile
|
-- Profile
|
||||||
profile_photo_url VARCHAR,
|
profile_photo_url VARCHAR,
|
||||||
@@ -137,20 +161,44 @@ CREATE TABLE IF NOT EXISTS users (
|
|||||||
-- Status & Role
|
-- Status & Role
|
||||||
status userstatus NOT NULL DEFAULT 'pending_email',
|
status userstatus NOT NULL DEFAULT 'pending_email',
|
||||||
role userrole NOT NULL DEFAULT 'guest',
|
role userrole NOT NULL DEFAULT 'guest',
|
||||||
role_id UUID, -- For dynamic RBAC (added in later migration)
|
role_id UUID, -- For dynamic RBAC
|
||||||
|
|
||||||
-- Rejection Tracking
|
-- Newsletter Preferences
|
||||||
rejection_reason TEXT,
|
newsletter_subscribed BOOLEAN DEFAULT TRUE,
|
||||||
rejected_at TIMESTAMP WITH TIME ZONE,
|
newsletter_publish_name BOOLEAN DEFAULT FALSE NOT NULL,
|
||||||
rejected_by UUID REFERENCES users(id),
|
newsletter_publish_photo BOOLEAN DEFAULT FALSE NOT NULL,
|
||||||
|
newsletter_publish_birthday BOOLEAN DEFAULT FALSE NOT NULL,
|
||||||
|
newsletter_publish_none BOOLEAN DEFAULT FALSE NOT NULL,
|
||||||
|
|
||||||
|
-- Volunteer Interests
|
||||||
|
volunteer_interests JSONB DEFAULT '[]'::jsonb,
|
||||||
|
|
||||||
|
-- Scholarship Request
|
||||||
|
scholarship_requested BOOLEAN DEFAULT FALSE NOT NULL,
|
||||||
|
scholarship_reason TEXT,
|
||||||
|
|
||||||
|
-- Directory Settings
|
||||||
|
show_in_directory BOOLEAN DEFAULT FALSE NOT NULL,
|
||||||
|
directory_email VARCHAR,
|
||||||
|
directory_bio TEXT,
|
||||||
|
directory_address VARCHAR,
|
||||||
|
directory_phone VARCHAR,
|
||||||
|
directory_dob DATE,
|
||||||
|
directory_partner_name VARCHAR,
|
||||||
|
|
||||||
|
-- Password Reset
|
||||||
|
password_reset_token VARCHAR,
|
||||||
|
password_reset_expires TIMESTAMP WITH TIME ZONE,
|
||||||
|
force_password_change BOOLEAN DEFAULT FALSE NOT NULL,
|
||||||
|
|
||||||
|
-- Terms of Service
|
||||||
|
accepts_tos BOOLEAN DEFAULT FALSE NOT NULL,
|
||||||
|
tos_accepted_at TIMESTAMP WITH TIME ZONE,
|
||||||
|
|
||||||
-- Membership
|
-- Membership
|
||||||
member_since DATE,
|
member_since DATE,
|
||||||
accepts_tos BOOLEAN DEFAULT FALSE,
|
|
||||||
tos_accepted_at TIMESTAMP WITH TIME ZONE,
|
|
||||||
newsletter_subscribed BOOLEAN DEFAULT TRUE,
|
|
||||||
|
|
||||||
-- Reminder Tracking (from migration 004)
|
-- Reminder Tracking
|
||||||
email_verification_reminders_sent INTEGER DEFAULT 0 NOT NULL,
|
email_verification_reminders_sent INTEGER DEFAULT 0 NOT NULL,
|
||||||
last_email_verification_reminder_at TIMESTAMP WITH TIME ZONE,
|
last_email_verification_reminder_at TIMESTAMP WITH TIME ZONE,
|
||||||
event_attendance_reminders_sent INTEGER DEFAULT 0 NOT NULL,
|
event_attendance_reminders_sent INTEGER DEFAULT 0 NOT NULL,
|
||||||
@@ -160,12 +208,21 @@ CREATE TABLE IF NOT EXISTS users (
|
|||||||
renewal_reminders_sent INTEGER DEFAULT 0 NOT NULL,
|
renewal_reminders_sent INTEGER DEFAULT 0 NOT NULL,
|
||||||
last_renewal_reminder_at TIMESTAMP WITH TIME ZONE,
|
last_renewal_reminder_at TIMESTAMP WITH TIME ZONE,
|
||||||
|
|
||||||
|
-- Rejection Tracking
|
||||||
|
rejection_reason TEXT,
|
||||||
|
rejected_at TIMESTAMP WITH TIME ZONE,
|
||||||
|
rejected_by UUID REFERENCES users(id),
|
||||||
|
|
||||||
-- WordPress Import Tracking
|
-- WordPress Import Tracking
|
||||||
import_source VARCHAR(50),
|
import_source VARCHAR(50),
|
||||||
import_job_id UUID REFERENCES import_jobs(id),
|
import_job_id UUID REFERENCES import_jobs(id),
|
||||||
wordpress_user_id BIGINT,
|
wordpress_user_id BIGINT,
|
||||||
wordpress_registered_date TIMESTAMP WITH TIME ZONE,
|
wordpress_registered_date TIMESTAMP WITH TIME ZONE,
|
||||||
|
|
||||||
|
-- Role Change Audit Trail
|
||||||
|
role_changed_at TIMESTAMP WITH TIME ZONE,
|
||||||
|
role_changed_by UUID REFERENCES users(id) ON DELETE SET NULL,
|
||||||
|
|
||||||
-- Timestamps
|
-- Timestamps
|
||||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
|
||||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
|
updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
|
||||||
@@ -255,11 +312,23 @@ CREATE TABLE IF NOT EXISTS subscription_plans (
     name VARCHAR NOT NULL,
     description TEXT,
     price_cents INTEGER NOT NULL,
-    billing_cycle VARCHAR NOT NULL DEFAULT 'annual',
+    billing_cycle VARCHAR NOT NULL DEFAULT 'yearly',
+    stripe_price_id VARCHAR, -- Legacy, deprecated
 
     -- Configuration
     active BOOLEAN NOT NULL DEFAULT TRUE,
-    features JSONB DEFAULT '[]'::jsonb,
+
+    -- Custom billing cycle fields (for recurring date ranges like Jan 1 - Dec 31)
+    custom_cycle_enabled BOOLEAN DEFAULT FALSE NOT NULL,
+    custom_cycle_start_month INTEGER,
+    custom_cycle_start_day INTEGER,
+    custom_cycle_end_month INTEGER,
+    custom_cycle_end_day INTEGER,
+
+    -- Dynamic pricing fields
+    minimum_price_cents INTEGER DEFAULT 3000 NOT NULL,
+    suggested_price_cents INTEGER,
+    allow_donation BOOLEAN DEFAULT TRUE NOT NULL,
 
     -- Timestamps
     created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
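The custom cycle and dynamic pricing columns above only store configuration; the renewal window and the split between subscription and donation still have to be computed at checkout. The following is a minimal sketch of that logic, assuming the column names from this schema on a hypothetical plan row object; the project's real helpers are not part of this compare.

# Illustrative sketch only - not part of the diff above.
from datetime import date
from typing import Optional

def resolve_cycle_end(plan, today: date) -> Optional[date]:
    """End of the current custom cycle (e.g. Dec 31), or None for rolling plans."""
    if not plan.custom_cycle_enabled:
        return None
    end = date(today.year, plan.custom_cycle_end_month, plan.custom_cycle_end_day)
    # A date that already passed means the membership runs to next year's end date.
    return end if end >= today else end.replace(year=today.year + 1)

def split_payment(plan, amount_cents: int) -> tuple:
    """Split a member-chosen amount into (base_subscription_cents, donation_cents),
    mirroring the note in the subscriptions model: amount_paid = base + donation."""
    if amount_cents < plan.minimum_price_cents:
        raise ValueError("Amount is below the plan minimum")
    base = min(plan.price_cents, amount_cents)
    donation = amount_cents - base if plan.allow_donation else 0
    return base, donation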
@@ -281,13 +350,21 @@ CREATE TABLE IF NOT EXISTS subscriptions (
     status subscriptionstatus DEFAULT 'active',
     start_date TIMESTAMP WITH TIME ZONE NOT NULL,
     end_date TIMESTAMP WITH TIME ZONE,
-    next_billing_date TIMESTAMP WITH TIME ZONE,
 
     -- Payment Details
     amount_paid_cents INTEGER,
     base_subscription_cents INTEGER NOT NULL,
     donation_cents INTEGER DEFAULT 0 NOT NULL,
+
+    -- Stripe transaction metadata (for validation and audit)
+    stripe_payment_intent_id VARCHAR,
+    stripe_charge_id VARCHAR,
+    stripe_invoice_id VARCHAR,
+    payment_completed_at TIMESTAMP WITH TIME ZONE,
+    card_last4 VARCHAR(4),
+    card_brand VARCHAR(20),
+    stripe_receipt_url VARCHAR,
 
     -- Manual Payment Support
     manual_payment BOOLEAN DEFAULT FALSE NOT NULL,
     manual_payment_notes TEXT,
@@ -319,6 +396,14 @@ CREATE TABLE IF NOT EXISTS donations (
     stripe_payment_intent_id VARCHAR,
     payment_method VARCHAR,
 
+    -- Stripe transaction metadata (for validation and audit)
+    stripe_charge_id VARCHAR,
+    stripe_customer_id VARCHAR,
+    payment_completed_at TIMESTAMP WITH TIME ZONE,
+    card_last4 VARCHAR(4),
+    card_brand VARCHAR(20),
+    stripe_receipt_url VARCHAR,
+
     -- Metadata
     notes TEXT,
     created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
@@ -445,7 +530,7 @@ CREATE TABLE IF NOT EXISTS storage_usage (
     id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
 
     total_bytes_used BIGINT NOT NULL DEFAULT 0,
-    max_bytes_allowed BIGINT NOT NULL DEFAULT 10737418240, -- 10GB
+    max_bytes_allowed BIGINT NOT NULL DEFAULT 1073741824, -- 1GB
     last_updated TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
 );
 
@@ -466,29 +551,10 @@ CREATE TABLE IF NOT EXISTS user_invitations (
     created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
 );
 
--- Import Jobs table
-CREATE TABLE IF NOT EXISTS import_jobs (
-    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-    filename VARCHAR NOT NULL,
-    status importjobstatus NOT NULL DEFAULT 'processing',
-    total_rows INTEGER DEFAULT 0,
-    processed_rows INTEGER DEFAULT 0,
-    success_count INTEGER DEFAULT 0,
-    error_count INTEGER DEFAULT 0,
-    error_log JSONB DEFAULT '[]'::jsonb,
-
-    -- WordPress import enhancements
-    field_mapping JSONB DEFAULT '{}'::jsonb,
-    wordpress_metadata JSONB DEFAULT '{}'::jsonb,
-    imported_user_ids JSONB DEFAULT '[]'::jsonb,
-    rollback_at TIMESTAMP WITH TIME ZONE,
-    rollback_by UUID REFERENCES users(id),
-
-    started_by UUID REFERENCES users(id),
-    started_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
-    completed_at TIMESTAMP WITH TIME ZONE
-);
+-- Add FK constraints to import_jobs (now that users table exists)
+ALTER TABLE import_jobs
+    ADD CONSTRAINT fk_import_jobs_rollback_by FOREIGN KEY (rollback_by) REFERENCES users(id),
+    ADD CONSTRAINT fk_import_jobs_started_by FOREIGN KEY (started_by) REFERENCES users(id);
 
 -- Import Rollback Audit table (for tracking rollback operations)
 CREATE TABLE IF NOT EXISTS import_rollback_audit (
@@ -542,12 +608,18 @@ CREATE INDEX IF NOT EXISTS idx_subscriptions_user_id ON subscriptions(user_id);
 CREATE INDEX IF NOT EXISTS idx_subscriptions_plan_id ON subscriptions(plan_id);
 CREATE INDEX IF NOT EXISTS idx_subscriptions_status ON subscriptions(status);
 CREATE INDEX IF NOT EXISTS idx_subscriptions_stripe_subscription_id ON subscriptions(stripe_subscription_id);
+CREATE INDEX IF NOT EXISTS idx_subscriptions_payment_intent ON subscriptions(stripe_payment_intent_id);
+CREATE INDEX IF NOT EXISTS idx_subscriptions_charge_id ON subscriptions(stripe_charge_id);
+CREATE INDEX IF NOT EXISTS idx_subscriptions_invoice_id ON subscriptions(stripe_invoice_id);
 
 -- Donations indexes
 CREATE INDEX IF NOT EXISTS idx_donation_user ON donations(user_id);
 CREATE INDEX IF NOT EXISTS idx_donation_type ON donations(donation_type);
 CREATE INDEX IF NOT EXISTS idx_donation_status ON donations(status);
 CREATE INDEX IF NOT EXISTS idx_donation_created ON donations(created_at);
+CREATE INDEX IF NOT EXISTS idx_donation_payment_intent ON donations(stripe_payment_intent_id);
+CREATE INDEX IF NOT EXISTS idx_donation_charge_id ON donations(stripe_charge_id);
+CREATE INDEX IF NOT EXISTS idx_donation_customer_id ON donations(stripe_customer_id);
 
 -- Import Jobs indexes
 CREATE INDEX IF NOT EXISTS idx_import_jobs_status ON import_jobs(status);
@@ -587,7 +659,7 @@ INSERT INTO storage_usage (id, total_bytes_used, max_bytes_allowed, last_updated
 SELECT
     gen_random_uuid(),
     0,
-    10737418240, -- 10GB
+    1073741824, -- 1GB
     CURRENT_TIMESTAMP
 WHERE NOT EXISTS (SELECT 1 FROM storage_usage);
 
117
models.py
@@ -44,6 +44,13 @@ class DonationStatus(enum.Enum):
     completed = "completed"
     failed = "failed"
 
+
+class PaymentMethodType(enum.Enum):
+    card = "card"
+    cash = "cash"
+    bank_transfer = "bank_transfer"
+    check = "check"
+
 
 class User(Base):
     __tablename__ = "users"
 
@@ -137,6 +144,17 @@ class User(Base):
     wordpress_user_id = Column(BigInteger, nullable=True, comment="Original WordPress user ID")
     wordpress_registered_date = Column(DateTime(timezone=True), nullable=True, comment="Original WordPress registration date")
 
+    # Role Change Audit Trail
+    role_changed_at = Column(DateTime(timezone=True), nullable=True, comment="Timestamp when role was last changed")
+    role_changed_by = Column(UUID(as_uuid=True), ForeignKey('users.id', ondelete='SET NULL'), nullable=True, comment="Admin who changed the role")
+
+    # Stripe Customer ID - Centralized for payment method management
+    stripe_customer_id = Column(String, nullable=True, index=True, comment="Stripe Customer ID for payment method management")
+
+    # Dynamic Registration Form - Custom field responses
+    custom_registration_data = Column(JSON, default=dict, nullable=False,
+                                      comment="Dynamic registration field responses stored as JSON for custom form fields")
+
     created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
     updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
 
@@ -145,6 +163,53 @@ class User(Base):
     events_created = relationship("Event", back_populates="creator")
     rsvps = relationship("EventRSVP", back_populates="user")
     subscriptions = relationship("Subscription", back_populates="user", foreign_keys="Subscription.user_id")
+    role_changer = relationship("User", foreign_keys=[role_changed_by], remote_side="User.id", post_update=True)
+    payment_methods = relationship("PaymentMethod", back_populates="user", foreign_keys="PaymentMethod.user_id")
+
+
+class PaymentMethod(Base):
+    """Stored payment methods for users (Stripe or manual records)"""
+    __tablename__ = "payment_methods"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
+
+    # Stripe payment method reference
+    stripe_payment_method_id = Column(String, nullable=True, unique=True, index=True, comment="Stripe pm_xxx reference")
+
+    # Card details (stored for display purposes - PCI compliant)
+    card_brand = Column(String(20), nullable=True, comment="Card brand: visa, mastercard, amex, etc.")
+    card_last4 = Column(String(4), nullable=True, comment="Last 4 digits of card")
+    card_exp_month = Column(Integer, nullable=True, comment="Card expiration month")
+    card_exp_year = Column(Integer, nullable=True, comment="Card expiration year")
+    card_funding = Column(String(20), nullable=True, comment="Card funding type: credit, debit, prepaid")
+
+    # Payment type classification
+    payment_type = Column(SQLEnum(PaymentMethodType), default=PaymentMethodType.card, nullable=False)
+
+    # Status flags
+    is_default = Column(Boolean, default=False, nullable=False, comment="Whether this is the default payment method for auto-renewals")
+    is_active = Column(Boolean, default=True, nullable=False, comment="Soft delete flag - False means removed")
+    is_manual = Column(Boolean, default=False, nullable=False, comment="True for manually recorded methods (cash/check)")
+
+    # Manual payment notes (for cash/check records)
+    manual_notes = Column(Text, nullable=True, comment="Admin notes for manual payment methods")
+
+    # Audit trail
+    created_by = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True, comment="Admin who added this on behalf of user")
+    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False)
+    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)
+
+    # Relationships
+    user = relationship("User", back_populates="payment_methods", foreign_keys=[user_id])
+    creator = relationship("User", foreign_keys=[created_by])
+
+    # Composite index for efficient queries
+    __table_args__ = (
+        Index('idx_payment_method_user_default', 'user_id', 'is_default'),
+        Index('idx_payment_method_active', 'user_id', 'is_active'),
+    )
+
 
 class Event(Base):
     __tablename__ = "events"
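Nothing in the PaymentMethod model itself guarantees a single default per user; the is_default/is_active flags and the (user_id, is_default) index only make that pattern cheap to query. Below is a minimal sketch of how a service layer might enforce the invariant, assuming a SQLAlchemy session and the model above; the project's actual payment-method service code is not shown in this compare.

# Illustrative sketch only - not part of the diff above.
from sqlalchemy.orm import Session
from models import PaymentMethod  # the model introduced in this hunk

def set_default_payment_method(db: Session, user_id, payment_method_id) -> None:
    """Mark one active payment method as the user's default and clear any previous default."""
    db.query(PaymentMethod).filter(
        PaymentMethod.user_id == user_id,
        PaymentMethod.is_default.is_(True),
    ).update({PaymentMethod.is_default: False}, synchronize_session=False)

    updated = db.query(PaymentMethod).filter(
        PaymentMethod.id == payment_method_id,
        PaymentMethod.user_id == user_id,
        PaymentMethod.is_active.is_(True),
    ).update({PaymentMethod.is_default: True}, synchronize_session=False)

    if not updated:
        db.rollback()
        raise ValueError("Payment method not found or inactive")
    db.commit()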
@@ -233,6 +298,15 @@ class Subscription(Base):
     donation_cents = Column(Integer, default=0, nullable=False) # Additional donation amount
     # Note: amount_paid_cents = base_subscription_cents + donation_cents
 
+    # Stripe transaction metadata (for validation and audit)
+    stripe_payment_intent_id = Column(String, nullable=True, index=True) # Initial payment transaction ID
+    stripe_charge_id = Column(String, nullable=True, index=True) # Actual charge reference
+    stripe_invoice_id = Column(String, nullable=True, index=True) # Invoice reference
+    payment_completed_at = Column(DateTime(timezone=True), nullable=True) # Exact payment timestamp from Stripe
+    card_last4 = Column(String(4), nullable=True) # Last 4 digits of card
+    card_brand = Column(String(20), nullable=True) # Visa, Mastercard, etc.
+    stripe_receipt_url = Column(String, nullable=True) # Link to Stripe receipt
+
     # Manual payment fields
     manual_payment = Column(Boolean, default=False, nullable=False) # Whether this was a manual offline payment
     manual_payment_notes = Column(Text, nullable=True) # Admin notes about the payment
@@ -264,9 +338,17 @@ class Donation(Base):
 
     # Payment details
     stripe_checkout_session_id = Column(String, nullable=True)
-    stripe_payment_intent_id = Column(String, nullable=True)
+    stripe_payment_intent_id = Column(String, nullable=True, index=True)
     payment_method = Column(String, nullable=True) # card, bank_transfer, etc.
+
+    # Stripe transaction metadata (for validation and audit)
+    stripe_charge_id = Column(String, nullable=True, index=True) # Actual charge reference
+    stripe_customer_id = Column(String, nullable=True, index=True) # Customer ID if created
+    payment_completed_at = Column(DateTime(timezone=True), nullable=True) # Exact payment timestamp from Stripe
+    card_last4 = Column(String(4), nullable=True) # Last 4 digits of card
+    card_brand = Column(String(20), nullable=True) # Visa, Mastercard, etc.
+    stripe_receipt_url = Column(String, nullable=True) # Link to Stripe receipt
 
     # Metadata
     notes = Column(Text, nullable=True)
     created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
@@ -509,3 +591,36 @@ class ImportRollbackAudit(Base):
     # Relationships
     import_job = relationship("ImportJob")
     admin_user = relationship("User", foreign_keys=[rolled_back_by])
+
+
+# ============================================================
+# System Settings Models
+# ============================================================
+
+class SettingType(enum.Enum):
+    plaintext = "plaintext"
+    encrypted = "encrypted"
+    json = "json"
+
+
+class SystemSettings(Base):
+    """System-wide configuration settings stored in database"""
+    __tablename__ = "system_settings"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    setting_key = Column(String(100), unique=True, nullable=False, index=True)
+    setting_value = Column(Text, nullable=True)
+    setting_type = Column(SQLEnum(SettingType), default=SettingType.plaintext, nullable=False)
+    description = Column(Text, nullable=True)
+    updated_by = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True)
+    created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False)
+    updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)
+    is_sensitive = Column(Boolean, default=False, nullable=False)
+
+    # Relationships
+    updater = relationship("User", foreign_keys=[updated_by])
+
+    # Index on updated_at for audit queries
+    __table_args__ = (
+        Index('idx_system_settings_updated_at', 'updated_at'),
+    )
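Rows whose setting_type is encrypted hold ciphertext, and the Stripe helpers further down import get_encryption_service() from an encryption_service module to decrypt them on read. That module is not included in this compare, so the shape below is only a plausible sketch, assuming a Fernet key derived from an environment-provided secret such as SETTINGS_ENCRYPTION_KEY.

# Illustrative sketch only - encryption_service.py itself is not part of this compare.
import base64
import hashlib
import os

from cryptography.fernet import Fernet

class EncryptionService:
    """Symmetric encrypt/decrypt for sensitive system_settings values."""

    def __init__(self, secret: str):
        # Derive a valid 32-byte urlsafe Fernet key from an arbitrary-length secret.
        key = base64.urlsafe_b64encode(hashlib.sha256(secret.encode()).digest())
        self._fernet = Fernet(key)

    def encrypt(self, plaintext: str) -> str:
        return self._fernet.encrypt(plaintext.encode()).decode()

    def decrypt(self, ciphertext: str) -> str:
        return self._fernet.decrypt(ciphertext.encode()).decode()

_service = None

def get_encryption_service() -> EncryptionService:
    """Module-level singleton, matching how the Stripe helpers import it."""
    global _service
    if _service is None:
        _service = EncryptionService(os.environ["SETTINGS_ENCRYPTION_KEY"])
    return _service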
@@ -11,11 +11,9 @@ from datetime import datetime, timezone, timedelta
 # Load environment variables
 load_dotenv()
 
-# Initialize Stripe with secret key
-stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
-
-# Stripe webhook secret for signature verification
-STRIPE_WEBHOOK_SECRET = os.getenv("STRIPE_WEBHOOK_SECRET")
+# NOTE: Stripe credentials are now database-driven
+# These .env fallbacks are kept for backward compatibility only
+# The actual credentials are loaded dynamically from system_settings table
 
 def create_checkout_session(
     user_id: str,
@@ -23,11 +21,15 @@ def create_checkout_session(
     plan_id: str,
     stripe_price_id: str,
     success_url: str,
-    cancel_url: str
+    cancel_url: str,
+    db = None
 ):
     """
     Create a Stripe Checkout session for subscription payment.
 
+    Args:
+        db: Database session (optional, for reading Stripe credentials from database)
+
     Args:
         user_id: User's UUID
         user_email: User's email address
@@ -39,6 +41,28 @@ def create_checkout_session(
     Returns:
         dict: Checkout session object with session ID and URL
     """
+    # Load Stripe API key from database if available
+    if db:
+        try:
+            # Import here to avoid circular dependency
+            from models import SystemSettings, SettingType
+            from encryption_service import get_encryption_service
+
+            setting = db.query(SystemSettings).filter(
+                SystemSettings.setting_key == 'stripe_secret_key'
+            ).first()
+
+            if setting and setting.setting_value:
+                encryption_service = get_encryption_service()
+                stripe.api_key = encryption_service.decrypt(setting.setting_value)
+        except Exception as e:
+            # Fallback to .env if database read fails
+            print(f"Failed to read Stripe key from database: {e}")
+            stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
+    else:
+        # Fallback to .env if no db session
+        stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
+
     try:
         # Create Checkout Session
         checkout_session = stripe.checkout.Session.create(
@@ -74,13 +98,14 @@ def create_checkout_session(
         raise Exception(f"Stripe error: {str(e)}")
 
 
-def verify_webhook_signature(payload: bytes, sig_header: str) -> dict:
+def verify_webhook_signature(payload: bytes, sig_header: str, db=None) -> dict:
     """
     Verify Stripe webhook signature and construct event.
 
     Args:
         payload: Raw webhook payload bytes
         sig_header: Stripe signature header
+        db: Database session (optional, for reading webhook secret from database)
 
     Returns:
         dict: Verified webhook event
@@ -88,9 +113,32 @@ def verify_webhook_signature(payload: bytes, sig_header: str) -> dict:
     Raises:
         ValueError: If signature verification fails
     """
+    # Load webhook secret from database if available
+    webhook_secret = None
+    if db:
+        try:
+            from models import SystemSettings
+            from encryption_service import get_encryption_service
+
+            setting = db.query(SystemSettings).filter(
+                SystemSettings.setting_key == 'stripe_webhook_secret'
+            ).first()
+
+            if setting and setting.setting_value:
+                encryption_service = get_encryption_service()
+                webhook_secret = encryption_service.decrypt(setting.setting_value)
+        except Exception as e:
+            print(f"Failed to read webhook secret from database: {e}")
+            webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
+    else:
+        webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
+
+    if not webhook_secret:
+        raise ValueError("STRIPE_WEBHOOK_SECRET not configured")
+
     try:
         event = stripe.Webhook.construct_event(
-            payload, sig_header, STRIPE_WEBHOOK_SECRET
+            payload, sig_header, webhook_secret
        )
         return event
     except ValueError as e:
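Both helpers now take an optional db argument, so callers are expected to pass their request-scoped session; the database-stored credentials then take precedence and the .env values act only as fallbacks. A hedged sketch of that wiring for the webhook side, assuming a FastAPI-style route living in the same module as verify_webhook_signature and a get_db session dependency, neither of which is shown in this compare:

# Illustrative sketch only - route path, get_db and the framework choice are assumptions.
from fastapi import APIRouter, Depends, Header, HTTPException, Request
from sqlalchemy.orm import Session

from database import get_db  # assumed request-scoped session dependency

router = APIRouter()

@router.post("/webhooks/stripe")
async def stripe_webhook(request: Request,
                         stripe_signature: str = Header(None),
                         db: Session = Depends(get_db)):
    payload = await request.body()
    try:
        # Passing db lets verify_webhook_signature read the secret from system_settings,
        # falling back to the STRIPE_WEBHOOK_SECRET env var only if that lookup fails.
        event = verify_webhook_signature(payload, stripe_signature, db=db)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc))
    # ... dispatch on event["type"] here ...
    return {"received": True}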
@@ -327,6 +327,38 @@ PERMISSIONS = [
         "module": "gallery"
     },
+
+    # ========== PAYMENT METHODS MODULE ==========
+    {
+        "code": "payment_methods.view",
+        "name": "View Payment Methods",
+        "description": "View user payment methods (masked)",
+        "module": "payment_methods"
+    },
+    {
+        "code": "payment_methods.view_sensitive",
+        "name": "View Sensitive Payment Details",
+        "description": "View full payment method details including Stripe IDs (requires password)",
+        "module": "payment_methods"
+    },
+    {
+        "code": "payment_methods.create",
+        "name": "Create Payment Methods",
+        "description": "Add payment methods on behalf of users",
+        "module": "payment_methods"
+    },
+    {
+        "code": "payment_methods.delete",
+        "name": "Delete Payment Methods",
+        "description": "Delete user payment methods",
+        "module": "payment_methods"
+    },
+    {
+        "code": "payment_methods.set_default",
+        "name": "Set Default Payment Method",
+        "description": "Set a user's default payment method",
+        "module": "payment_methods"
+    },
 
     # ========== SETTINGS MODULE ==========
     {
         "code": "settings.view",
@@ -453,6 +485,10 @@ DEFAULT_ROLE_PERMISSIONS = {
         "gallery.edit",
         "gallery.delete",
         "gallery.moderate",
+        "payment_methods.view",
+        "payment_methods.create",
+        "payment_methods.delete",
+        "payment_methods.set_default",
         "settings.view",
         "settings.edit",
         "settings.email_templates",
@@ -460,6 +496,36 @@ DEFAULT_ROLE_PERMISSIONS = {
         "settings.logs",
     ],
+
+    UserRole.finance: [
+        # Finance role has all admin permissions plus sensitive payment access
+        "users.view",
+        "users.export",
+        "events.view",
+        "events.rsvps",
+        "events.calendar_export",
+        "subscriptions.view",
+        "subscriptions.create",
+        "subscriptions.edit",
+        "subscriptions.cancel",
+        "subscriptions.activate",
+        "subscriptions.plans",
+        "financials.view",
+        "financials.create",
+        "financials.edit",
+        "financials.delete",
+        "financials.export",
+        "financials.payments",
+        "newsletters.view",
+        "bylaws.view",
+        "gallery.view",
+        "payment_methods.view",
+        "payment_methods.view_sensitive", # Finance can view sensitive payment details
+        "payment_methods.create",
+        "payment_methods.delete",
+        "payment_methods.set_default",
+        "settings.view",
+    ],
 
     # Superadmin gets all permissions automatically in code,
     # so we don't need to explicitly assign them
     UserRole.superadmin: []
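These role-to-permission mappings are plain code strings; at request time the check reduces to asking whether the caller's role carries a given code, with superadmin bypassing the lookup entirely (as the comment above notes). A tiny, self-contained sketch of that check follows; the project's real RBAC helpers are not part of this compare.

# Illustrative sketch only - not the project's actual permission checker.
def has_permission(granted_codes: set, role_code: str, required: str) -> bool:
    """True if the role carries the permission code; superadmin always passes."""
    return role_code == "superadmin" or required in granted_codes

# Example: only roles granted payment_methods.view_sensitive (finance, per the
# mapping above) may see unmasked Stripe payment method identifiers.
assert has_permission({"payment_methods.view", "payment_methods.view_sensitive"},
                      "finance", "payment_methods.view_sensitive")
assert not has_permission({"payment_methods.view"}, "admin", "payment_methods.view_sensitive")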
@@ -35,6 +35,21 @@ class R2Storage:
         'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': ['.xlsx']
     }
 
+    # Branding assets (logo and favicon)
+    ALLOWED_BRANDING_TYPES = {
+        'image/jpeg': ['.jpg', '.jpeg'],
+        'image/png': ['.png'],
+        'image/webp': ['.webp'],
+        'image/svg+xml': ['.svg']
+    }
+
+    ALLOWED_FAVICON_TYPES = {
+        'image/x-icon': ['.ico'],
+        'image/vnd.microsoft.icon': ['.ico'],
+        'image/png': ['.png'],
+        'image/svg+xml': ['.svg']
+    }
+
     def __init__(self):
         """Initialize R2 client with credentials from environment"""
         self.account_id = os.getenv('R2_ACCOUNT_ID')
@@ -31,7 +31,7 @@ motor==3.3.1
 msal==1.27.0
 mypy==1.18.2
 mypy_extensions==1.1.0
-numpy==2.2.6
+numpy==2.3.5
 oauthlib==3.3.1
 packaging==25.0
 pandas==2.3.3
@@ -2,7 +2,7 @@
 """
 Permission Seeding Script for Dynamic RBAC System
 
-This script populates the database with 59 granular permissions and assigns them
+This script populates the database with 65 granular permissions and assigns them
 to the appropriate dynamic roles (not the old enum roles).
 
 Usage:
@@ -33,7 +33,7 @@ engine = create_engine(DATABASE_URL)
 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
 
 # ============================================================
-# Permission Definitions (59 permissions across 10 modules)
+# Permission Definitions (65 permissions across 11 modules)
 # ============================================================
 
 PERMISSIONS = [
@@ -116,6 +116,55 @@ PERMISSIONS = [
     {"code": "permissions.assign", "name": "Assign Permissions", "description": "Assign permissions to roles", "module": "permissions"},
     {"code": "permissions.manage_roles", "name": "Manage Roles", "description": "Create and manage user roles", "module": "permissions"},
     {"code": "permissions.audit", "name": "View Permission Audit Log", "description": "View permission change audit logs", "module": "permissions"},
+
+    # ========== PAYMENT METHODS MODULE (5) ==========
+    {"code": "payment_methods.view", "name": "View Payment Methods", "description": "View user payment methods (masked)", "module": "payment_methods"},
+    {"code": "payment_methods.view_sensitive", "name": "View Sensitive Payment Details", "description": "View full Stripe payment method IDs (requires password)", "module": "payment_methods"},
+    {"code": "payment_methods.create", "name": "Create Payment Methods", "description": "Add payment methods on behalf of users", "module": "payment_methods"},
+    {"code": "payment_methods.delete", "name": "Delete Payment Methods", "description": "Remove user payment methods", "module": "payment_methods"},
+    {"code": "payment_methods.set_default", "name": "Set Default Payment Method", "description": "Set default payment method for users", "module": "payment_methods"},
+
+    # ========== REGISTRATION MODULE (2) ==========
+    {"code": "registration.view", "name": "View Registration Settings", "description": "View registration form schema and settings", "module": "registration"},
+    {"code": "registration.manage", "name": "Manage Registration Form", "description": "Edit registration form schema, steps, and fields", "module": "registration"},
+
+    # ========== DIRECTORY MODULE (2) ==========
+    {"code": "directory.view", "name": "View Directory Settings", "description": "View member directory field configuration", "module": "directory"},
+    {"code": "directory.manage", "name": "Manage Directory Fields", "description": "Enable/disable directory fields shown in Profile and Directory pages", "module": "directory"},
+]
+
+# Default system roles that must exist
+DEFAULT_ROLES = [
+    {
+        "code": "guest",
+        "name": "Guest",
+        "description": "Default role for new registrations with no special permissions",
+        "is_system_role": True
+    },
+    {
+        "code": "member",
+        "name": "Member",
+        "description": "Active paying members with access to member-only content",
+        "is_system_role": True
+    },
+    {
+        "code": "finance",
+        "name": "Finance",
+        "description": "Financial management role with access to payments, subscriptions, and reports",
+        "is_system_role": True
+    },
+    {
+        "code": "admin",
+        "name": "Admin",
+        "description": "Board members with full management access except RBAC",
+        "is_system_role": True
+    },
+    {
+        "code": "superadmin",
+        "name": "Superadmin",
+        "description": "Full system access including RBAC management",
+        "is_system_role": True
+    },
 ]
 
 # Default permission assignments for dynamic roles
@@ -136,6 +185,9 @@ DEFAULT_ROLE_PERMISSIONS = {
         "subscriptions.cancel", "subscriptions.activate", "subscriptions.plans",
         "subscriptions.export",
         "donations.view", "donations.export",
+        # Payment methods - finance can view sensitive details
+        "payment_methods.view", "payment_methods.view_sensitive",
+        "payment_methods.create", "payment_methods.delete", "payment_methods.set_default",
     ],
 
     "admin": [
@@ -157,6 +209,13 @@ DEFAULT_ROLE_PERMISSIONS = {
         "gallery.view", "gallery.upload", "gallery.edit", "gallery.delete", "gallery.moderate",
         "settings.view", "settings.edit", "settings.email_templates", "settings.storage",
         "settings.logs",
+        # Payment methods - admin can manage but not view sensitive details
+        "payment_methods.view", "payment_methods.create",
+        "payment_methods.delete", "payment_methods.set_default",
+        # Registration form management
+        "registration.view", "registration.manage",
+        # Directory configuration
+        "directory.view", "directory.manage",
     ],
 
     "superadmin": [
@@ -196,7 +255,34 @@ def seed_permissions():
         print(f"\n⚠️ WARNING: Tables not fully cleared! Stopping.")
         return
 
-    # Step 2: Create permissions
+    # Step 2: Create default system roles
+    print(f"\n👤 Creating {len(DEFAULT_ROLES)} system roles...")
+    role_map = {}
+
+    for role_data in DEFAULT_ROLES:
+        # Check if role already exists
+        existing_role = db.query(Role).filter(Role.code == role_data["code"]).first()
+        if existing_role:
+            print(f" • {role_data['name']}: Already exists, updating...")
+            existing_role.name = role_data["name"]
+            existing_role.description = role_data["description"]
+            existing_role.is_system_role = role_data["is_system_role"]
+            role_map[role_data["code"]] = existing_role
+        else:
+            print(f" • {role_data['name']}: Creating...")
+            role = Role(
+                code=role_data["code"],
+                name=role_data["name"],
+                description=role_data["description"],
+                is_system_role=role_data["is_system_role"]
+            )
+            db.add(role)
+            role_map[role_data["code"]] = role
+
+    db.commit()
+    print(f"✓ Created/updated {len(DEFAULT_ROLES)} system roles")
+
+    # Step 3: Create permissions
     print(f"\n📝 Creating {len(PERMISSIONS)} permissions...")
     permission_map = {} # Map code to permission object
 
@@ -213,13 +299,13 @@ def seed_permissions():
     db.commit()
     print(f"✓ Created {len(PERMISSIONS)} permissions")
 
-    # Step 3: Get all roles from database
-    print("\n🔍 Fetching dynamic roles...")
+    # Step 4: Verify roles exist
+    print("\n🔍 Verifying dynamic roles...")
     roles = db.query(Role).all()
     role_map = {role.code: role for role in roles}
     print(f"✓ Found {len(roles)} roles: {', '.join(role_map.keys())}")
 
-    # Step 4: Assign permissions to roles
+    # Step 5: Assign permissions to roles
     print("\n🔐 Assigning permissions to roles...")
 
     from models import UserRole # Import for enum mapping
@@ -258,7 +344,7 @@ def seed_permissions():
         db.commit()
         print(f" ✓ {role.name}: Assigned {len(permission_codes)} permissions")
 
-    # Step 5: Summary
+    # Step 6: Summary
     print("\n" + "=" * 80)
     print("📊 SEEDING SUMMARY")
     print("=" * 80)
@@ -273,7 +359,8 @@ def seed_permissions():
     for module, count in sorted(modules.items()):
         print(f" • {module.capitalize()}: {count} permissions")
 
-    print(f"\nTotal permissions created: {len(PERMISSIONS)}")
+    print(f"\nTotal system roles created: {len(DEFAULT_ROLES)}")
+    print(f"Total permissions created: {len(PERMISSIONS)}")
     print(f"Total role-permission mappings: {total_assigned}")
     print("\n✅ Permission seeding completed successfully!")
     print("\nNext step: Restart backend server")