forked from andika/membership-be
Compare commits
56 Commits
| Author | SHA1 | Date | Message |
|---|---|---|---|
| e7f6e9c20a | |||
| 0cd5350a7b | |||
| dd41cf773b | |||
|
|
1c262c4804 | ||
|
|
a053075a30 | ||
|
|
6f8ec1d254 | ||
|
|
9754f2db6e | ||
|
|
03e5dd8bda | ||
|
|
ab0f098f99 | ||
|
|
ea87b3f6ee | ||
|
|
b29bb641f5 | ||
|
|
d322d1334f | ||
|
|
ece1e62913 | ||
|
|
d3a0cabede | ||
|
|
e938baa78e | ||
|
|
39324ba6f6 | ||
|
|
adbfa7a3c8 | ||
|
|
a74f161efa | ||
|
|
d818d847bc | ||
|
|
1390e07500 | ||
|
|
810366d00f | ||
|
|
314380eec6 | ||
|
|
2b82f4acd8 | ||
|
|
cca694766b | ||
|
|
0171546bba | ||
|
|
96aca7d39b | ||
|
|
82319509c3 | ||
|
|
2547758864 | ||
|
|
669d78beb5 | ||
|
|
df789612e8 | ||
|
|
9266521bf1 | ||
|
|
c8f4040244 | ||
|
|
c848d4240a | ||
|
|
6784148058 | ||
|
|
340f838925 | ||
|
|
d967d1934e | ||
|
|
03ae921a5f | ||
|
|
d5f2373143 | ||
|
|
7ad5bfb1e5 | ||
|
|
f9bfdfa879 | ||
|
|
53bf84ba57 | ||
|
|
25b1cb916a | ||
|
|
cd8b4dcaa6 | ||
|
|
efc2002a67 | ||
|
|
6ec0745966 | ||
|
|
85199958bc | ||
|
|
487481b322 | ||
| fad23c6e57 | |||
|
|
91c7fc01e6 | ||
| 3bcc69f3a2 | |||
|
|
db13f0e9de | ||
|
|
b7ab1a897f | ||
|
|
1050abd830 | ||
|
|
f1798ea922 | ||
|
|
050cccae8f | ||
|
|
ed5526e27b |
83
.dockerignore
Normal file
83
.dockerignore
Normal file
@@ -0,0 +1,83 @@
|
||||
# Git
|
||||
.git
|
||||
.gitignore
|
||||
|
||||
# Python
|
||||
__pycache__
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# Virtual environments
|
||||
venv/
|
||||
ENV/
|
||||
env/
|
||||
.venv/
|
||||
|
||||
# IDE
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
|
||||
# Testing
|
||||
.pytest_cache/
|
||||
.coverage
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
|
||||
# Environment files (will be mounted or passed via env vars)
|
||||
.env
|
||||
.env.local
|
||||
.env.*.local
|
||||
*.env
|
||||
|
||||
# Logs
|
||||
*.log
|
||||
logs/
|
||||
|
||||
# Database
|
||||
*.db
|
||||
*.sqlite3
|
||||
|
||||
# Alembic
|
||||
alembic/versions/__pycache__/
|
||||
|
||||
# Docker
|
||||
Dockerfile
|
||||
docker-compose*.yml
|
||||
.docker/
|
||||
|
||||
# Documentation
|
||||
*.md
|
||||
docs/
|
||||
|
||||
# Temporary files
|
||||
tmp/
|
||||
temp/
|
||||
*.tmp
|
||||
|
||||
# OS files
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Uploads (will be mounted as volume)
|
||||
uploads/
|
||||
13
.env.example
13
.env.example
@@ -6,6 +6,10 @@ JWT_SECRET=your-secret-key-change-this-in-production
|
||||
JWT_ALGORITHM=HS256
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES=30
|
||||
|
||||
# Settings Encryption (for database-stored sensitive settings)
|
||||
# Generate with: python -c "import secrets; print(secrets.token_urlsafe(64))"
|
||||
SETTINGS_ENCRYPTION_KEY=your-encryption-key-generate-with-command-above
|
||||
|
||||
# SMTP Email Configuration (Port 465 - SSL/TLS)
|
||||
SMTP_HOST=p.konceptkit.com
|
||||
SMTP_PORT=465
|
||||
@@ -28,7 +32,14 @@ SMTP_FROM_NAME=LOAF Membership
|
||||
# Frontend URL
|
||||
FRONTEND_URL=http://localhost:3000
|
||||
|
||||
# Stripe Configuration (for future payment integration)
|
||||
# Backend URL (for webhook URLs and API references)
|
||||
# Used to construct Stripe webhook URL shown in Admin Settings
|
||||
BACKEND_URL=http://localhost:8000
|
||||
|
||||
# Stripe Configuration (NOW DATABASE-DRIVEN via Admin Settings page)
|
||||
# Configure Stripe credentials through the Admin Settings UI (requires SETTINGS_ENCRYPTION_KEY)
|
||||
# No longer requires .env variables - managed through database for dynamic updates
|
||||
# Legacy .env variables below are deprecated:
|
||||
# STRIPE_SECRET_KEY=sk_test_...
|
||||
# STRIPE_WEBHOOK_SECRET=whsec_...
|
||||
|
||||
|
||||
308
.gitignore
vendored
308
.gitignore
vendored
@@ -1 +1,309 @@
|
||||
# ============================================================================
|
||||
# Python Backend .gitignore
|
||||
# For FastAPI + PostgreSQL + Cloudflare R2 + Stripe
|
||||
# ============================================================================
|
||||
|
||||
# ===== Environment Variables =====
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
.envrc
|
||||
|
||||
# ===== Python =====
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff (if ever added):
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff (if ever added):
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff (if ever added):
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# pipenv
|
||||
Pipfile.lock
|
||||
|
||||
# poetry
|
||||
poetry.lock
|
||||
|
||||
# pdm
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# ===== Database =====
|
||||
# SQLite (development)
|
||||
*.db
|
||||
*.sqlite
|
||||
*.sqlite3
|
||||
|
||||
# PostgreSQL dumps
|
||||
*.sql.gz
|
||||
*.dump
|
||||
|
||||
# Database backups
|
||||
backups/
|
||||
*.backup
|
||||
|
||||
# ===== Alembic Migrations =====
|
||||
# Keep migration files but ignore bytecode
|
||||
alembic/__pycache__/
|
||||
alembic/versions/__pycache__/
|
||||
# Keep alembic.ini, env.py, and all migration files in alembic/versions/
|
||||
|
||||
# ===== IDE / Editors =====
|
||||
# VSCode
|
||||
.vscode/
|
||||
*.code-workspace
|
||||
|
||||
# PyCharm
|
||||
.idea/
|
||||
*.iml
|
||||
*.ipr
|
||||
*.iws
|
||||
|
||||
# Sublime Text
|
||||
*.sublime-project
|
||||
*.sublime-workspace
|
||||
|
||||
# Vim
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
.netrwhist
|
||||
|
||||
# Emacs
|
||||
*~
|
||||
\#*\#
|
||||
/.emacs.desktop
|
||||
/.emacs.desktop.lock
|
||||
*.elc
|
||||
|
||||
# Eclipse
|
||||
.project
|
||||
.pydevproject
|
||||
.settings/
|
||||
|
||||
# ===== Operating System =====
|
||||
# macOS
|
||||
.DS_Store
|
||||
.AppleDouble
|
||||
.LSOverride
|
||||
._*
|
||||
.DocumentRevisions-V100
|
||||
.fseventsd
|
||||
.Spotlight-V100
|
||||
.TemporaryItems
|
||||
.Trashes
|
||||
.VolumeIcon.icns
|
||||
.com.apple.timemachine.donotpresent
|
||||
|
||||
# Windows
|
||||
Thumbs.db
|
||||
Thumbs.db:encryptable
|
||||
ehthumbs.db
|
||||
ehthumbs_vista.db
|
||||
*.stackdump
|
||||
[Dd]esktop.ini
|
||||
$RECYCLE.BIN/
|
||||
*.cab
|
||||
*.msi
|
||||
*.msix
|
||||
*.msm
|
||||
*.msp
|
||||
*.lnk
|
||||
|
||||
# Linux
|
||||
.directory
|
||||
.Trash-*
|
||||
.nfs*
|
||||
|
||||
# ===== Logs & Runtime =====
|
||||
*.log
|
||||
logs/
|
||||
*.out
|
||||
*.err
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# ===== Application-Specific =====
|
||||
# Uploaded files (R2 storage handles this)
|
||||
uploads/
|
||||
temp_uploads/
|
||||
tmp/
|
||||
temporary/
|
||||
|
||||
# Generated SQL files (from scripts)
|
||||
create_superadmin.sql
|
||||
|
||||
# CSV imports
|
||||
imports/*.csv
|
||||
!imports/.gitkeep
|
||||
|
||||
# Generated reports
|
||||
reports/
|
||||
exports/
|
||||
|
||||
# Cache directories
|
||||
.cache/
|
||||
cache/
|
||||
|
||||
# ===== Security & Secrets =====
|
||||
# API keys and secrets
|
||||
secrets/
|
||||
*.pem
|
||||
*.key
|
||||
*.cert
|
||||
*.crt
|
||||
*.p12
|
||||
*.pfx
|
||||
|
||||
# Stripe webhook secrets
|
||||
stripe_*.txt
|
||||
|
||||
# ===== Testing =====
|
||||
# Test databases
|
||||
test.db
|
||||
test_*.db
|
||||
|
||||
# Test coverage
|
||||
htmlcov/
|
||||
.coverage
|
||||
|
||||
# ===== Miscellaneous =====
|
||||
# Backup files
|
||||
*.bak
|
||||
*.backup
|
||||
*.old
|
||||
*.orig
|
||||
|
||||
# Compressed files
|
||||
*.zip
|
||||
*.tar.gz
|
||||
*.rar
|
||||
|
||||
# Temporary files
|
||||
*.tmp
|
||||
*.temp
|
||||
|
||||
# Lock files
|
||||
*.lock
|
||||
!requirements.txt.lock
|
||||
|
||||
# ===== Keep These =====
|
||||
# Keep these example/template files
|
||||
!.env.example
|
||||
!migrations/.gitkeep
|
||||
!uploads/.gitkeep
|
||||
|
||||
247
DATABASE_STATUS.md
Normal file
247
DATABASE_STATUS.md
Normal file
@@ -0,0 +1,247 @@
|
||||
# Database Status - LOAF Membership Platform
|
||||
|
||||
**Database:** `loaf_new`
|
||||
**Host:** 10.9.23.11:54321
|
||||
**Last Updated:** 2026-01-03
|
||||
**Status:** ✅ Fully initialized with seed data
|
||||
|
||||
---
|
||||
|
||||
## Database Summary
|
||||
|
||||
### Tables (18 total)
|
||||
|
||||
| Table Name | Status | Records | Purpose |
|
||||
|------------|--------|---------|---------|
|
||||
| ✅ alembic_version | Active | 1 | Migration tracking (001_initial_baseline) |
|
||||
| ✅ users | Active | 0 | User accounts and profiles |
|
||||
| ✅ events | Active | 0 | Event management |
|
||||
| ✅ event_rsvps | Active | 0 | Event RSVPs and attendance |
|
||||
| ✅ event_galleries | Active | 0 | Event photo galleries |
|
||||
| ✅ roles | Active | 5 | RBAC role definitions |
|
||||
| ✅ permissions | Active | 25 | RBAC permission definitions |
|
||||
| ✅ role_permissions | Active | 49 | Role-permission mappings |
|
||||
| ✅ user_invitations | Active | 0 | Admin invitation system |
|
||||
| ✅ subscriptions | Active | 0 | User subscriptions |
|
||||
| ✅ subscription_plans | Active | 3 | Available membership plans |
|
||||
| ✅ donations | Active | 0 | Donation tracking |
|
||||
| ✅ import_jobs | Active | 0 | CSV import tracking |
|
||||
| ✅ import_rollback_audit | Active | 0 | Import rollback audit trail |
|
||||
| ✅ newsletter_archives | Active | 0 | Newsletter document archive |
|
||||
| ✅ financial_reports | Active | 0 | Financial document archive |
|
||||
| ✅ bylaws_documents | Active | 0 | Bylaws document archive |
|
||||
| ✅ storage_usage | Active | 1 | Storage quota tracking (100GB limit) |
|
||||
|
||||
### ENUMs (8 total)
|
||||
|
||||
| ENUM Name | Values | Used By |
|
||||
|-----------|--------|---------|
|
||||
| ✅ userstatus | pending_email, awaiting_event, pre_approved, payment_pending, active, inactive | users.status |
|
||||
| ✅ userrole | guest, member, admin, finance, superadmin | users.role, user_invitations.role |
|
||||
| ✅ rsvpstatus | yes, no, maybe | event_rsvps.rsvp_status |
|
||||
| ✅ subscriptionstatus | active, past_due, canceled, incomplete, trialing | subscriptions.status |
|
||||
| ✅ donationtype | one_time, recurring, pledge, in_kind, memorial | donations.donation_type |
|
||||
| ✅ donationstatus | pending, completed, failed, refunded | donations.status |
|
||||
| ✅ invitationstatus | pending, accepted, expired, revoked | user_invitations.status |
|
||||
| ✅ importjobstatus | processing, completed, failed | import_jobs.status |
|
||||
|
||||
---
|
||||
|
||||
## Seed Data Loaded
|
||||
|
||||
### Roles (5)
|
||||
|
||||
| Code | Name | System Role | Permissions |
|
||||
|------|------|-------------|-------------|
|
||||
| admin | Admin | Yes | 16 |
|
||||
| finance | Finance | Yes | 7 |
|
||||
| guest | Guest | Yes | 0 |
|
||||
| member | Member | Yes | 1 |
|
||||
| superadmin | Super Admin | Yes | 25 |
|
||||
|
||||
### Permissions (25 across 5 modules)
|
||||
|
||||
**Users Module (6 permissions):**
|
||||
- users.view - View Users
|
||||
- users.create - Create Users
|
||||
- users.edit - Edit Users
|
||||
- users.delete - Delete Users
|
||||
- users.approve - Approve Users
|
||||
- users.import - Import Users
|
||||
|
||||
**Events Module (6 permissions):**
|
||||
- events.view - View Events
|
||||
- events.create - Create Events
|
||||
- events.edit - Edit Events
|
||||
- events.delete - Delete Events
|
||||
- events.publish - Publish Events
|
||||
- events.manage_attendance - Manage Attendance
|
||||
|
||||
**Finance Module (5 permissions):**
|
||||
- finance.view - View Financial Data
|
||||
- finance.manage_plans - Manage Subscription Plans
|
||||
- finance.manage_subscriptions - Manage Subscriptions
|
||||
- finance.view_reports - View Financial Reports
|
||||
- finance.export - Export Financial Data
|
||||
|
||||
**Content Module (3 permissions):**
|
||||
- content.newsletters - Manage Newsletters
|
||||
- content.documents - Manage Documents
|
||||
- content.gallery - Manage Gallery
|
||||
|
||||
**System Module (5 permissions):**
|
||||
- system.settings - System Settings
|
||||
- system.roles - Manage Roles
|
||||
- system.invitations - Manage Invitations
|
||||
- system.storage - Manage Storage
|
||||
- system.audit - View Audit Logs
|
||||
|
||||
### Subscription Plans (3)
|
||||
|
||||
| Plan Name | Price | Billing | Custom Pricing | Donation Support |
|
||||
|-----------|-------|---------|----------------|------------------|
|
||||
| Pay What You Want Membership | $30.00 (min) | Annual | ✅ Yes | ✅ Yes |
|
||||
| Annual Individual Membership | $60.00 | Annual | ❌ No | ❌ No |
|
||||
| Annual Group Membership | $100.00 | Annual | ❌ No | ❌ No |
|
||||
|
||||
**Note:** Stripe price IDs need to be configured after Stripe setup.
|
||||
|
||||
---
|
||||
|
||||
## Migration Status
|
||||
|
||||
**Current Revision:** `001_initial_baseline (head)`
|
||||
**Migration System:** Alembic 1.14.0
|
||||
**Schema Source:** `migrations/000_initial_schema.sql`
|
||||
**Seed Source:** `migrations/seed_data.sql`
|
||||
|
||||
**Migration History:**
|
||||
- `001_initial_baseline` - Empty baseline marker (2026-01-02)
|
||||
|
||||
**Future migrations** will be generated using:
|
||||
```bash
|
||||
alembic revision --autogenerate -m "description"
|
||||
alembic upgrade head
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Next Steps
|
||||
|
||||
### Immediate (Required)
|
||||
|
||||
1. **Create Superadmin User**
|
||||
```bash
|
||||
cd backend
|
||||
python3 create_superadmin.py
|
||||
```
|
||||
|
||||
2. **Configure Stripe Price IDs**
|
||||
```sql
|
||||
UPDATE subscription_plans
|
||||
SET stripe_price_id = 'price_xxx'
|
||||
WHERE name = 'Annual Individual Membership';
|
||||
|
||||
UPDATE subscription_plans
|
||||
SET stripe_price_id = 'price_yyy'
|
||||
WHERE name = 'Annual Group Membership';
|
||||
|
||||
UPDATE subscription_plans
|
||||
SET stripe_price_id = 'price_zzz'
|
||||
WHERE name = 'Pay What You Want Membership';
|
||||
```
|
||||
|
||||
3. **Set Environment Variables**
|
||||
- Copy `backend/.env.example` to `backend/.env`
|
||||
- Fill in all required values (DATABASE_URL, JWT_SECRET, SMTP, Stripe, R2)
|
||||
|
||||
4. **Test Application**
|
||||
```bash
|
||||
# Backend
|
||||
cd backend
|
||||
uvicorn server:app --reload
|
||||
|
||||
# Frontend (separate terminal)
|
||||
cd frontend
|
||||
yarn start
|
||||
```
|
||||
|
||||
### Optional (Recommended)
|
||||
|
||||
1. **Add Sample Events**
|
||||
- Login as superadmin
|
||||
- Navigate to Admin → Events
|
||||
- Create 2-3 sample events
|
||||
|
||||
2. **Test Registration Flow**
|
||||
- Register a test user
|
||||
- Verify email verification works
|
||||
- Test event RSVP
|
||||
- Test admin approval flow
|
||||
|
||||
3. **Configure Email Templates**
|
||||
- Review templates in `backend/email_service.py`
|
||||
- Customize colors, branding, copy
|
||||
|
||||
4. **Set Up Monitoring**
|
||||
- Configure error logging
|
||||
- Set up uptime monitoring
|
||||
- Configure backup schedule
|
||||
|
||||
---
|
||||
|
||||
## Database Maintenance
|
||||
|
||||
### Backup Command
|
||||
|
||||
```bash
|
||||
PGPASSWORD='your-password' pg_dump -h 10.9.23.11 -p 54321 -U postgres loaf_new > backup_$(date +%Y%m%d_%H%M%S).sql
|
||||
```
|
||||
|
||||
### Restore Command
|
||||
|
||||
```bash
|
||||
PGPASSWORD='your-password' psql -h 10.9.23.11 -p 54321 -U postgres -d loaf_new < backup_file.sql
|
||||
```
|
||||
|
||||
### Health Check Queries
|
||||
|
||||
```sql
|
||||
-- Check user count by status
|
||||
SELECT status, COUNT(*) FROM users GROUP BY status;
|
||||
|
||||
-- Check upcoming events
|
||||
SELECT title, start_at FROM events WHERE start_at > NOW() ORDER BY start_at LIMIT 5;
|
||||
|
||||
-- Check active subscriptions
|
||||
SELECT COUNT(*) FROM subscriptions WHERE status = 'active';
|
||||
|
||||
-- Check storage usage
|
||||
SELECT
|
||||
total_bytes_used / 1024 / 1024 / 1024 as used_gb,
|
||||
max_bytes_allowed / 1024 / 1024 / 1024 as max_gb,
|
||||
ROUND((total_bytes_used::numeric / max_bytes_allowed * 100)::numeric, 2) as percent_used
|
||||
FROM storage_usage;
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Support & Resources
|
||||
|
||||
- **Deployment Guide:** See `DEPLOYMENT.md` for complete deployment instructions
|
||||
- **API Documentation:** http://localhost:8000/docs (available while the backend is running)
|
||||
- **Alembic Guide:** See `backend/alembic/README.md` for migration documentation
|
||||
- **Project Documentation:** See `CLAUDE.md` for codebase overview
|
||||
|
||||
---
|
||||
|
||||
## Changelog
|
||||
|
||||
**2026-01-03:**
|
||||
- ✅ Created all 17 data tables
|
||||
- ✅ Created all 8 ENUMs
|
||||
- ✅ Loaded seed data (5 roles, 25 permissions, 3 subscription plans)
|
||||
- ✅ Initialized Alembic tracking (001_initial_baseline)
|
||||
- ✅ Created superadmin user helper script
|
||||
|
||||
**Status:** Database is fully initialized and ready for use. Next step: Create superadmin user and start application.
|
||||
379
DEPLOYMENT.md
Normal file
379
DEPLOYMENT.md
Normal file
@@ -0,0 +1,379 @@
|
||||
# Deployment Guide - LOAF Membership Platform
|
||||
|
||||
## Fresh Database Installation
|
||||
|
||||
Follow these steps in order for a **brand new deployment**:
|
||||
|
||||
### Step 1: Create PostgreSQL Database
|
||||
|
||||
```bash
|
||||
# Connect to PostgreSQL
|
||||
psql -U postgres
|
||||
|
||||
# Create database
|
||||
CREATE DATABASE membership_db;
|
||||
|
||||
# Create user (if needed)
|
||||
CREATE USER loaf_admin WITH PASSWORD 'your-secure-password';
|
||||
GRANT ALL PRIVILEGES ON DATABASE membership_db TO loaf_admin;
|
||||
|
||||
# Exit PostgreSQL
|
||||
\q
|
||||
```
|
||||
|
||||
### Step 2: Run Initial Schema
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
|
||||
# Apply the complete schema (creates all 17 tables, 8 enums, indexes)
|
||||
psql -U loaf_admin -d membership_db -f migrations/000_initial_schema.sql
|
||||
```
|
||||
|
||||
**What this creates:**
|
||||
- ✅ 17 tables: users, events, subscriptions, roles, permissions, etc.
|
||||
- ✅ 8 custom enums: UserStatus, UserRole, RSVPStatus, etc.
|
||||
- ✅ All indexes and foreign keys
|
||||
- ✅ All constraints and defaults
|
||||
|
||||
### Step 3: Mark Database for Alembic Tracking
|
||||
|
||||
```bash
|
||||
# Mark the database as being at the baseline
|
||||
alembic stamp head
|
||||
```
|
||||
|
||||
### Step 4: Verify Setup
|
||||
|
||||
```bash
|
||||
# Check Alembic status
|
||||
alembic current
|
||||
# Expected output: 001_initial_baseline (head)
|
||||
|
||||
# Check database tables
|
||||
psql -U loaf_admin -d membership_db -c "\dt"
|
||||
# Should show 17 tables
|
||||
```
|
||||
|
||||
### Step 5: Set Environment Variables
|
||||
|
||||
Create `backend/.env`:
|
||||
|
||||
```env
|
||||
# Database
|
||||
DATABASE_URL=postgresql://loaf_admin:your-password@localhost:5432/membership_db
|
||||
|
||||
# JWT
|
||||
JWT_SECRET=your-secret-key-minimum-32-characters-long
|
||||
JWT_ALGORITHM=HS256
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES=30
|
||||
|
||||
# Email (SMTP)
|
||||
SMTP_HOST=smtp.gmail.com
|
||||
SMTP_PORT=587
|
||||
SMTP_USERNAME=your-email@gmail.com
|
||||
SMTP_PASSWORD=your-app-password
|
||||
SMTP_FROM_EMAIL=noreply@loafmembers.org
|
||||
SMTP_FROM_NAME=LOAF Membership
|
||||
|
||||
# Frontend URL
|
||||
FRONTEND_URL=https://members.loafmembers.org
|
||||
|
||||
# Cloudflare R2
|
||||
R2_ENDPOINT_URL=https://your-account-id.r2.cloudflarestorage.com
|
||||
R2_ACCESS_KEY_ID=your-r2-access-key
|
||||
R2_SECRET_ACCESS_KEY=your-r2-secret-key
|
||||
R2_BUCKET_NAME=loaf-membership
|
||||
R2_PUBLIC_URL=https://cdn.loafmembers.org
|
||||
|
||||
# Stripe
|
||||
STRIPE_SECRET_KEY=sk_live_...
|
||||
STRIPE_WEBHOOK_SECRET=whsec_...
|
||||
STRIPE_PRICE_ID_ANNUAL=price_...
|
||||
STRIPE_PRICE_ID_GROUP=price_...
|
||||
```
|
||||
|
||||
### Step 6: Install Dependencies
|
||||
|
||||
```bash
|
||||
# Backend
|
||||
cd backend
|
||||
pip install -r requirements.txt
|
||||
|
||||
# Frontend
|
||||
cd ../frontend
|
||||
yarn install
|
||||
```
|
||||
|
||||
### Step 7: Start Services
|
||||
|
||||
```bash
|
||||
# Backend (in backend/)
|
||||
uvicorn server:app --host 0.0.0.0 --port 8000
|
||||
|
||||
# Frontend (in frontend/)
|
||||
yarn start
|
||||
```
|
||||
|
||||
### Step 8: Create First Superadmin User
|
||||
|
||||
```bash
|
||||
# Connect to database
|
||||
psql -U loaf_admin -d membership_db
|
||||
|
||||
# Create superadmin user
|
||||
INSERT INTO users (
|
||||
id, email, password_hash, first_name, last_name,
|
||||
status, role, email_verified, created_at, updated_at
|
||||
) VALUES (
|
||||
gen_random_uuid(),
|
||||
'admin@loafmembers.org',
|
||||
'$2b$12$your-bcrypt-hashed-password-here', -- Use bcrypt to hash password
|
||||
'Admin',
|
||||
'User',
|
||||
'active',
|
||||
'superadmin',
|
||||
true,
|
||||
NOW(),
|
||||
NOW()
|
||||
);
|
||||
```
|
||||
|
||||
**Generate password hash:**
|
||||
```python
|
||||
import bcrypt
|
||||
password = b"your-secure-password"
|
||||
hashed = bcrypt.hashpw(password, bcrypt.gensalt())
|
||||
print(hashed.decode())
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Existing Database Update
|
||||
|
||||
For **updating an existing deployment** with new code:
|
||||
|
||||
### Step 1: Backup Database
|
||||
|
||||
```bash
|
||||
pg_dump -U loaf_admin membership_db > backup_$(date +%Y%m%d_%H%M%S).sql
|
||||
```
|
||||
|
||||
### Step 2: Pull Latest Code
|
||||
|
||||
```bash
|
||||
git pull origin main
|
||||
```
|
||||
|
||||
### Step 3: Install New Dependencies
|
||||
|
||||
```bash
|
||||
# Backend
|
||||
cd backend
|
||||
pip install -r requirements.txt
|
||||
|
||||
# Frontend
|
||||
cd ../frontend
|
||||
yarn install
|
||||
```
|
||||
|
||||
### Step 4: Apply Database Migrations
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
|
||||
# Check current migration status
|
||||
alembic current
|
||||
|
||||
# Apply pending migrations
|
||||
alembic upgrade head
|
||||
|
||||
# Verify
|
||||
alembic current
|
||||
```
|
||||
|
||||
### Step 5: Restart Services
|
||||
|
||||
```bash
|
||||
# Restart backend
|
||||
systemctl restart membership-backend
|
||||
|
||||
# Rebuild and restart frontend
|
||||
cd frontend
|
||||
yarn build
|
||||
systemctl restart membership-frontend
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## First-Time Alembic Setup (Existing Database)
|
||||
|
||||
If you have an **existing database** that was created with `000_initial_schema.sql` but hasn't been marked for Alembic tracking:
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
|
||||
# Mark database as being at the baseline (one-time only)
|
||||
alembic stamp head
|
||||
|
||||
# Verify
|
||||
alembic current
|
||||
# Expected output: 001_initial_baseline (head)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Database Schema Verification
|
||||
|
||||
**Check all tables exist:**
|
||||
```bash
|
||||
psql -U loaf_admin -d membership_db -c "\dt"
|
||||
```
|
||||
|
||||
**Expected tables (17 total):**
|
||||
- users
|
||||
- events
|
||||
- event_rsvps
|
||||
- subscriptions
|
||||
- subscription_plans
|
||||
- permissions
|
||||
- roles
|
||||
- role_permissions
|
||||
- user_invitations
|
||||
- import_jobs
|
||||
- import_rollback_audit
|
||||
- event_galleries
|
||||
- newsletter_archives
|
||||
- financial_reports
|
||||
- bylaws_documents
|
||||
- donations
|
||||
- storage_usage
|
||||
|
||||
**Check enums:**
|
||||
```bash
|
||||
psql -U loaf_admin -d membership_db -c "\dT"
|
||||
```
|
||||
|
||||
**Expected enums (8 total):**
|
||||
- userstatus
|
||||
- userrole
|
||||
- rsvpstatus
|
||||
- subscriptionstatus
|
||||
- donationtype
|
||||
- donationstatus
|
||||
- invitationstatus
|
||||
- importjobstatus
|
||||
|
||||
---
|
||||
|
||||
## Rollback Procedures
|
||||
|
||||
### Rollback Last Migration
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
alembic downgrade -1
|
||||
```
|
||||
|
||||
### Rollback to Specific Revision
|
||||
|
||||
```bash
|
||||
alembic downgrade <revision_id>
|
||||
```
|
||||
|
||||
### Complete Database Reset
|
||||
|
||||
```bash
|
||||
# WARNING: This deletes ALL data!
|
||||
|
||||
# 1. Backup first
|
||||
pg_dump -U loaf_admin membership_db > emergency_backup.sql
|
||||
|
||||
# 2. Drop database
|
||||
dropdb membership_db
|
||||
|
||||
# 3. Recreate database
|
||||
createdb membership_db
|
||||
|
||||
# 4. Run initial schema
|
||||
psql -U loaf_admin -d membership_db -f backend/migrations/000_initial_schema.sql
|
||||
|
||||
# 5. Mark for Alembic
|
||||
cd backend
|
||||
alembic stamp head
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "relation does not exist" error
|
||||
|
||||
The database wasn't initialized properly.
|
||||
|
||||
**Solution:**
|
||||
```bash
|
||||
psql -U loaf_admin -d membership_db -f backend/migrations/000_initial_schema.sql
|
||||
alembic stamp head
|
||||
```
|
||||
|
||||
### "Target database is not up to date"
|
||||
|
||||
Migrations haven't been applied.
|
||||
|
||||
**Solution:**
|
||||
```bash
|
||||
cd backend
|
||||
alembic upgrade head
|
||||
```
|
||||
|
||||
### "Can't locate revision"
|
||||
|
||||
Alembic tracking is out of sync.
|
||||
|
||||
**Solution:**
|
||||
```bash
|
||||
# Check what revision the database thinks it's at
|
||||
alembic current
|
||||
|
||||
# If empty or wrong, manually set it
|
||||
alembic stamp head
|
||||
```
|
||||
|
||||
### Database connection errors
|
||||
|
||||
Check `.env` file has correct `DATABASE_URL`.
|
||||
|
||||
**Format:**
|
||||
```
|
||||
DATABASE_URL=postgresql://username:password@host:port/database
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Production Checklist
|
||||
|
||||
Before going live:
|
||||
|
||||
- [ ] Database created and schema applied
|
||||
- [ ] Alembic marked as up-to-date (`alembic current` shows baseline)
|
||||
- [ ] All environment variables set in `.env`
|
||||
- [ ] Dependencies installed (Python + Node)
|
||||
- [ ] Superadmin user created
|
||||
- [ ] SSL certificates configured
|
||||
- [ ] Backup system in place
|
||||
- [ ] Monitoring configured
|
||||
- [ ] Domain DNS pointing to server
|
||||
- [ ] Email sending verified (SMTP working)
|
||||
- [ ] Stripe webhook endpoint configured
|
||||
- [ ] R2 bucket accessible and CORS configured
|
||||
|
||||
---
|
||||
|
||||
## Support
|
||||
|
||||
For issues:
|
||||
1. Check logs: `tail -f backend/logs/app.log`
|
||||
2. Check Alembic status: `alembic current`
|
||||
3. Verify environment variables: `cat backend/.env`
|
||||
4. Test database connection: `psql -U loaf_admin -d membership_db`
|
||||
287
DEPLOYMENT_GUIDE.md
Normal file
287
DEPLOYMENT_GUIDE.md
Normal file
@@ -0,0 +1,287 @@
|
||||
# Deployment Guide - Dynamic RBAC System
|
||||
|
||||
This guide covers deploying the dynamic Role-Based Access Control (RBAC) system to your dev server.
|
||||
|
||||
## Overview
|
||||
|
||||
The RBAC migration consists of 4 phases:
|
||||
- **Phase 1**: Add new database tables and columns (schema changes)
|
||||
- **Phase 2**: Seed system roles
|
||||
- **Phase 3**: Migrate existing data
|
||||
- **Phase 4**: System is fully operational with dynamic roles
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Database backup completed ✓
|
||||
- PostgreSQL access credentials
|
||||
- Python 3.8+ environment
|
||||
- All dependencies installed (`pip install -r requirements.txt`)
|
||||
|
||||
---
|
||||
|
||||
## Step-by-Step Deployment
|
||||
|
||||
### Step 1: Run Schema Migration (Phase 1)
|
||||
|
||||
This creates the new `roles` table and adds `role_id` columns to `users` and `role_permissions` tables.
|
||||
|
||||
```bash
|
||||
# Connect to your database
|
||||
psql -U <username> -d <database_name> -f migrations/006_add_dynamic_roles.sql
|
||||
```
|
||||
|
||||
**What this does:**
|
||||
- Creates `roles` table
|
||||
- Adds `role_id` column to `users` (nullable)
|
||||
- Adds `role_id` column to `role_permissions` (nullable)
|
||||
- Legacy `role` enum columns remain for backward compatibility
|
||||
|
||||
**Expected output:**
|
||||
```
|
||||
Step 1 completed: roles table created
|
||||
Step 2 completed: role_id column added to users table
|
||||
Step 3 completed: role_id column added to role_permissions table
|
||||
Migration 006 completed successfully!
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Step 2: Seed System Roles (Phase 2)
|
||||
|
||||
This creates the 5 system roles: Superadmin, Admin, Finance, Member, Guest.
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
python3 roles_seed.py
|
||||
```
|
||||
|
||||
**Expected output:**
|
||||
```
|
||||
Starting roles seeding...
|
||||
Created role: Superadmin (superadmin) - System role
|
||||
Created role: Admin (admin) - System role
|
||||
Created role: Finance (finance) - System role
|
||||
Created role: Member (member) - System role
|
||||
Created role: Guest (guest) - System role
|
||||
|
||||
Roles seeding completed!
|
||||
Total roles created: 5
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Step 3: Migrate Existing Users (Phase 3a)
|
||||
|
||||
This migrates all existing users from enum roles to the new dynamic role system.
|
||||
|
||||
```bash
|
||||
python3 migrate_users_to_dynamic_roles.py
|
||||
```
|
||||
|
||||
**Expected output:**
|
||||
```
|
||||
Starting user migration to dynamic roles...
|
||||
Processing user: admin@loaf.org (superadmin)
|
||||
✓ Mapped to role: Superadmin
|
||||
Processing user: finance@loaf.org (finance)
|
||||
✓ Mapped to role: Finance
|
||||
...
|
||||
User migration completed successfully!
|
||||
Total users migrated: X
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Step 4: Migrate Role Permissions (Phase 3b)
|
||||
|
||||
This migrates all existing role-permission mappings to use role_id.
|
||||
|
||||
```bash
|
||||
python3 migrate_role_permissions_to_dynamic_roles.py
|
||||
```
|
||||
|
||||
**Expected output:**
|
||||
```
|
||||
Starting role permissions migration to dynamic roles...
|
||||
Migrating permissions for role: guest
|
||||
✓ Migrated: events.view (X permissions)
|
||||
Migrating permissions for role: member
|
||||
✓ Migrated: events.create (X permissions)
|
||||
...
|
||||
Role permissions migration completed successfully!
|
||||
Total role_permission records migrated: X
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Step 5: Verify Migration
|
||||
|
||||
Run this verification script to ensure everything migrated correctly:
|
||||
|
||||
```bash
|
||||
python3 verify_admin_account.py
|
||||
```
|
||||
|
||||
**Expected output:**
|
||||
```
|
||||
================================================================================
|
||||
VERIFYING admin@loaf.org ACCOUNT
|
||||
================================================================================
|
||||
|
||||
✅ User found: Admin User
|
||||
Email: admin@loaf.org
|
||||
Status: UserStatus.active
|
||||
Email Verified: True
|
||||
|
||||
📋 Legacy Role (enum): superadmin
|
||||
✅ Dynamic Role: Superadmin (code: superadmin)
|
||||
Role ID: <uuid>
|
||||
Is System Role: True
|
||||
|
||||
🔐 Checking Permissions:
|
||||
Total permissions assigned to role: 56
|
||||
|
||||
🎯 Access Check:
|
||||
✅ User should have admin access (based on legacy enum)
|
||||
✅ User should have admin access (based on dynamic role)
|
||||
|
||||
================================================================================
|
||||
VERIFICATION COMPLETE
|
||||
================================================================================
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Step 6: Deploy Backend Code
|
||||
|
||||
```bash
|
||||
# Pull latest code
|
||||
git pull origin main
|
||||
|
||||
# Restart backend server
|
||||
# (adjust based on your deployment method)
|
||||
systemctl restart membership-backend
|
||||
# OR
|
||||
pm2 restart membership-backend
|
||||
# OR
|
||||
supervisorctl restart membership-backend
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Step 7: Verify API Endpoints
|
||||
|
||||
Test the role management endpoints:
|
||||
|
||||
```bash
|
||||
# Get all roles
|
||||
curl -H "Authorization: Bearer <token>" \
|
||||
http://your-server/api/admin/roles
|
||||
|
||||
# Get all permissions
|
||||
curl -H "Authorization: Bearer <token>" \
|
||||
http://your-server/api/admin/permissions
|
||||
|
||||
# Test export (the issue we just fixed)
|
||||
curl -H "Authorization: Bearer <token>" \
|
||||
http://your-server/api/admin/users/export
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Rollback Plan (If Needed)
|
||||
|
||||
If something goes wrong, you can rollback:
|
||||
|
||||
```sql
|
||||
BEGIN;
|
||||
|
||||
-- Remove new columns
|
||||
ALTER TABLE users DROP COLUMN IF EXISTS role_id;
|
||||
ALTER TABLE role_permissions DROP COLUMN IF EXISTS role_id;
|
||||
|
||||
-- Drop roles table
|
||||
DROP TABLE IF EXISTS roles CASCADE;
|
||||
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
Then restore from your backup if needed.
|
||||
|
||||
---
|
||||
|
||||
## Post-Deployment Checklist
|
||||
|
||||
- [ ] Schema migration completed without errors
|
||||
- [ ] System roles seeded (5 roles created)
|
||||
- [ ] All users migrated to dynamic roles
|
||||
- [ ] All role permissions migrated
|
||||
- [ ] Admin account verified
|
||||
- [ ] Backend server restarted
|
||||
- [ ] Export endpoint working (no 500 error)
|
||||
- [ ] Admin can view roles in UI (/admin/permissions)
|
||||
- [ ] Admin can create/edit roles
|
||||
- [ ] Admin can assign permissions to roles
|
||||
- [ ] Staff invitation uses dynamic roles
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Issue: Migration script fails
|
||||
|
||||
**Solution:** Check your `.env` file has correct `DATABASE_URL`:
|
||||
```
|
||||
DATABASE_URL=postgresql://user:password@host:port/database
|
||||
```
|
||||
|
||||
### Issue: "role_id column already exists"
|
||||
|
||||
**Solution:** This is safe to ignore. The migration uses `IF NOT EXISTS` clauses.
|
||||
|
||||
### Issue: "No roles found" when migrating users
|
||||
|
||||
**Solution:** Make sure you ran Step 2 (roles_seed.py) before Step 3.
|
||||
|
||||
### Issue: Export still returns 500 error
|
||||
|
||||
**Solution:**
|
||||
1. Verify backend code is latest version
|
||||
2. Check server.py has export route BEFORE {user_id} route (around line 1965)
|
||||
3. Restart backend server
|
||||
|
||||
### Issue: Permission denied errors
|
||||
|
||||
**Solution:** Make sure your database user has permissions:
|
||||
```sql
|
||||
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO <username>;
|
||||
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO <username>;
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Files Involved
|
||||
|
||||
### Migration Files
|
||||
- `migrations/006_add_dynamic_roles.sql` - Schema changes
|
||||
- `roles_seed.py` - Seed system roles
|
||||
- `migrate_users_to_dynamic_roles.py` - Migrate user data
|
||||
- `migrate_role_permissions_to_dynamic_roles.py` - Migrate permission data
|
||||
- `verify_admin_account.py` - Verification script
|
||||
|
||||
### Code Changes
|
||||
- `server.py` - Route reordering (export before {user_id})
|
||||
- `auth.py` - get_user_role_code() helper
|
||||
- `models.py` - Role model, role_id columns
|
||||
- Frontend: AdminRoles.js, InviteStaffDialog.js, AdminStaff.js, Navbar.js, Login.js
|
||||
|
||||
---
|
||||
|
||||
## Support
|
||||
|
||||
If you encounter issues during deployment, check:
|
||||
1. Backend logs: `tail -f /path/to/backend.log`
|
||||
2. Database logs: Check PostgreSQL error logs
|
||||
3. Frontend console: Browser developer tools
|
||||
|
||||
For questions, refer to the CLAUDE.md file for system architecture details.
|
||||
42
Dockerfile
42
Dockerfile
@@ -1,18 +1,40 @@
|
||||
# Dockerfile.dev
|
||||
FROM python:3.12-slim
|
||||
# Backend Dockerfile - FastAPI with Python
|
||||
FROM python:3.11-slim
|
||||
|
||||
# Set working directory
|
||||
# Set environment variables
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ENV PYTHONPATH=/app
|
||||
|
||||
# Set work directory
|
||||
WORKDIR /app
|
||||
|
||||
# Install dependencies
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
# Install system dependencies
|
||||
RUN apt-get update && apt-get install -y \
|
||||
gcc \
|
||||
libpq-dev \
|
||||
curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy all code
|
||||
# Install Python dependencies
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Copy application code
|
||||
COPY . .
|
||||
|
||||
# Expose port for FastAPI
|
||||
# Create non-root user for security
|
||||
RUN adduser --disabled-password --gecos '' appuser && \
|
||||
chown -R appuser:appuser /app
|
||||
USER appuser
|
||||
|
||||
# Expose port
|
||||
EXPOSE 8000
|
||||
|
||||
# Run uvicorn in dev mode with reload
|
||||
CMD ["python", "-m", "uvicorn", "server:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
||||
CMD curl -f http://localhost:8000/health || exit 1
|
||||
|
||||
# Run the application
|
||||
CMD ["uvicorn", "server:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
|
||||
328
MIGRATIONS.md
Normal file
328
MIGRATIONS.md
Normal file
@@ -0,0 +1,328 @@
|
||||
# Database Migrations Guide
|
||||
|
||||
This document explains how to set up the database for the LOAF membership platform on a fresh server.
|
||||
|
||||
---
|
||||
|
||||
## Quick Start (Fresh Database Setup)
|
||||
|
||||
For a **brand new deployment** on a fresh PostgreSQL database:
|
||||
|
||||
```bash
|
||||
# 1. Create PostgreSQL database
|
||||
psql -U postgres
|
||||
CREATE DATABASE membership_db;
|
||||
CREATE USER membership_user WITH PASSWORD 'your_password';
|
||||
GRANT ALL PRIVILEGES ON DATABASE membership_db TO membership_user;
|
||||
\q
|
||||
|
||||
# 2. Run initial schema migration
|
||||
psql -U postgres -d membership_db -f migrations/000_initial_schema.sql
|
||||
|
||||
# 3. Seed permissions and RBAC roles
|
||||
python seed_permissions_rbac.py
|
||||
|
||||
# 4. Create superadmin user
|
||||
python create_admin.py
|
||||
```
|
||||
|
||||
**That's it!** The database is now fully configured and ready for use.
|
||||
|
||||
---
|
||||
|
||||
## Migration Files Overview
|
||||
|
||||
### Core Migration (Run This First)
|
||||
|
||||
**`000_initial_schema.sql`** ✅ **START HERE**
|
||||
- Creates all 10 ENUM types (userstatus, userrole, rsvpstatus, etc.)
|
||||
- Creates all 17 base tables:
|
||||
- Core: users, events, event_rsvps, event_galleries
|
||||
- Financial: subscription_plans, subscriptions, donations
|
||||
- Documents: newsletter_archives, financial_reports, bylaws_documents
|
||||
- RBAC: permissions, roles, role_permissions
|
||||
- System: storage_usage, user_invitations, import_jobs
|
||||
- Creates 30+ performance indexes
|
||||
- Initializes default storage_usage record
|
||||
- **Status:** Required for fresh deployment
|
||||
- **Run Order:** #1
|
||||
|
||||
---
|
||||
|
||||
### Incremental Migrations (Historical - Only for Existing Databases)
|
||||
|
||||
These migrations were created to update existing databases incrementally. **If you're starting fresh with 000_initial_schema.sql, you DO NOT need to run these** as their changes are already included in the initial schema.
|
||||
|
||||
| File | Purpose | Included in 000? | Run on Fresh DB? |
|
||||
|------|---------|------------------|------------------|
|
||||
| `001_add_member_since_field.sql` | Adds member_since field to users | ✅ Yes | ❌ No |
|
||||
| `002_rename_approval_to_validation.sql` | Renames approval-related fields | ✅ Yes | ❌ No |
|
||||
| `003_add_tos_acceptance.sql` | Adds TOS acceptance tracking | ✅ Yes | ❌ No |
|
||||
| `004_add_reminder_tracking_fields.sql` | Adds reminder sent flags | ✅ Yes | ❌ No |
|
||||
| `005_add_rbac_and_invitations.sql` | Adds RBAC permissions & invitations | ✅ Yes | ❌ No |
|
||||
| `006_add_dynamic_roles.sql` | Adds dynamic roles table | ✅ Yes | ❌ No |
|
||||
| `009_create_donations.sql` | Creates donations table | ✅ Yes | ❌ No |
|
||||
| `010_add_rejection_fields.sql` | Adds rejection tracking to users | ✅ Yes | ❌ No |
|
||||
|
||||
**Note:** These files are kept for reference and for updating existing production databases that were created before 000_initial_schema.sql existed.
|
||||
|
||||
---
|
||||
|
||||
### Ad-Hoc Fix Migrations (Legacy - Do Not Run)
|
||||
|
||||
These were one-time fixes for specific issues during development:
|
||||
|
||||
- `add_calendar_uid.sql` - Added calendar UID field (now in 000)
|
||||
- `complete_fix.sql` - Added various profile fields (now in 000)
|
||||
- `fix_storage_usage.sql` - Fixed storage_usage initialization (now in 000)
|
||||
- `sprint_1_2_3_migration.sql` - Combined early sprint migrations (obsolete)
|
||||
- `verify_columns.sql` - Debugging script (not a migration)
|
||||
|
||||
**Status:** Do NOT run these on any database. They are archived for historical reference only.
|
||||
|
||||
---
|
||||
|
||||
## Python Migration Scripts (Data Migrations)
|
||||
|
||||
These scripts migrate **data**, not schema. Run these AFTER the SQL migrations if you have existing data to migrate:
|
||||
|
||||
| Script | Purpose | When to Run |
|
||||
|--------|---------|-------------|
|
||||
| `migrate_add_manual_payment.py` | Migrates manual payment data | Only if you have existing subscriptions with manual payments |
|
||||
| `migrate_billing_enhancements.py` | Migrates billing cycle data | Only if you have existing subscription plans |
|
||||
| `migrate_multistep_registration.py` | Migrates old registration format | Only if upgrading from Phase 0 |
|
||||
| `migrate_password_reset.py` | Migrates password reset tokens | Only if you have password reset data |
|
||||
| `migrate_role_permissions_to_dynamic_roles.py` | Migrates RBAC permissions | Run after seeding permissions (if upgrading) |
|
||||
| `migrate_status.py` | Migrates user status enum values | Only if upgrading from old status values |
|
||||
| `migrate_users_to_dynamic_roles.py` | Assigns users to dynamic roles | Run after seeding roles (if upgrading) |
|
||||
|
||||
**For Fresh Deployment:** You do NOT need to run any of these Python migration scripts. They are only for migrating data from older versions of the platform.
|
||||
|
||||
---
|
||||
|
||||
## Complete Deployment Workflow
|
||||
|
||||
### Scenario 1: Fresh Server (Brand New Database)
|
||||
|
||||
```bash
|
||||
# Step 1: Create database
|
||||
psql -U postgres << EOF
|
||||
CREATE DATABASE membership_db;
|
||||
CREATE USER membership_user WITH PASSWORD 'secure_password_here';
|
||||
GRANT ALL PRIVILEGES ON DATABASE membership_db TO membership_user;
|
||||
EOF
|
||||
|
||||
# Step 2: Run initial schema
|
||||
psql postgresql://membership_user:secure_password_here@localhost/membership_db \
|
||||
-f migrations/000_initial_schema.sql
|
||||
|
||||
# Expected output:
|
||||
# Step 1/8 completed: ENUM types created
|
||||
# Step 2/8 completed: Core tables created
|
||||
# ...
|
||||
# ✅ Migration 000 completed successfully!
|
||||
|
||||
# Step 3: Seed permissions (59 permissions across 10 modules)
|
||||
python seed_permissions_rbac.py
|
||||
|
||||
# Expected output:
|
||||
# ✅ Seeded 59 permissions
|
||||
# ✅ Created 5 system roles
|
||||
# ✅ Assigned permissions to roles
|
||||
|
||||
# Step 4: Create superadmin user (interactive)
|
||||
python create_admin.py
|
||||
|
||||
# Follow prompts to create your first superadmin account
|
||||
|
||||
# Step 5: Verify database
|
||||
psql postgresql://membership_user:secure_password_here@localhost/membership_db -c "
|
||||
SELECT
|
||||
(SELECT COUNT(*) FROM users) as users,
|
||||
(SELECT COUNT(*) FROM permissions) as permissions,
|
||||
(SELECT COUNT(*) FROM roles) as roles,
|
||||
(SELECT COUNT(*) FROM subscription_plans) as plans;
|
||||
"
|
||||
|
||||
# Expected output (fresh database):
|
||||
# users | permissions | roles | plans
|
||||
# ------+-------------+-------+-------
|
||||
# 1 | 59 | 5 | 0
|
||||
```
|
||||
|
||||
### Scenario 2: Upgrading Existing Database
|
||||
|
||||
If you already have a database with data and need to upgrade:
|
||||
|
||||
```bash
|
||||
# Check what migrations have been applied
|
||||
psql -d membership_db -c "SELECT * FROM users LIMIT 1;" # Check if tables exist
|
||||
|
||||
# Run missing migrations in order
|
||||
# Example: If you're on migration 006, run 009 and 010
|
||||
psql -d membership_db -f migrations/009_create_donations.sql
|
||||
psql -d membership_db -f migrations/010_add_rejection_fields.sql
|
||||
|
||||
# Run data migrations if needed
|
||||
python migrate_users_to_dynamic_roles.py # If upgrading RBAC
|
||||
python migrate_billing_enhancements.py # If upgrading subscriptions
|
||||
|
||||
# Update permissions
|
||||
python seed_permissions_rbac.py
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Verification & Troubleshooting
|
||||
|
||||
### Verify Database Schema
|
||||
|
||||
```bash
|
||||
# Check all tables exist (16 tables are listed below, though the overview above says 17 — verify the count)
|
||||
psql -d membership_db -c "\dt"
|
||||
|
||||
# Expected tables:
|
||||
# users, events, event_rsvps, event_galleries
|
||||
# subscription_plans, subscriptions, donations
|
||||
# newsletter_archives, financial_reports, bylaws_documents
|
||||
# permissions, roles, role_permissions
|
||||
# storage_usage, user_invitations, import_jobs
|
||||
|
||||
# Check ENUM types (8 types are listed below, though the overview above says 000 creates 10 — verify the count)
|
||||
psql -d membership_db -c "SELECT typname FROM pg_type WHERE typcategory = 'E';"
|
||||
|
||||
# Expected ENUMs:
|
||||
# userstatus, userrole, rsvpstatus, subscriptionstatus
|
||||
# donationtype, donationstatus, invitationstatus, importjobstatus
|
||||
|
||||
# Check indexes (should show 30+ indexes)
|
||||
psql -d membership_db -c "SELECT indexname FROM pg_indexes WHERE schemaname = 'public';"
|
||||
```
|
||||
|
||||
### Common Issues
|
||||
|
||||
**Issue 1: "relation already exists"**
|
||||
- **Cause:** Migration already run
|
||||
- **Solution:** Safe to ignore. 000_initial_schema.sql uses `IF NOT EXISTS` checks.
|
||||
|
||||
**Issue 2: "type already exists"**
|
||||
- **Cause:** ENUM type already created
|
||||
- **Solution:** Safe to ignore. The migration checks for existing types.
|
||||
|
||||
**Issue 3: "permission denied"**
|
||||
- **Cause:** Database user lacks privileges
|
||||
- **Solution:**
|
||||
```bash
|
||||
psql -U postgres -d membership_db
|
||||
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO membership_user;
|
||||
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO membership_user;
|
||||
```
|
||||
|
||||
**Issue 4: "could not connect to database"**
|
||||
- **Cause:** DATABASE_URL incorrect in .env
|
||||
- **Solution:** Verify connection string format:
|
||||
```
|
||||
DATABASE_URL=postgresql://username:password@localhost:5432/database_name
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Migration History & Rationale
|
||||
|
||||
### Why 000_initial_schema.sql?
|
||||
|
||||
The `000_initial_schema.sql` file was created to consolidate all incremental migrations (001-010) into a single comprehensive schema for fresh deployments. This approach:
|
||||
|
||||
✅ **Simplifies fresh deployments** - One file instead of 10
|
||||
✅ **Reduces errors** - No risk of running migrations out of order
|
||||
✅ **Faster setup** - Single transaction vs multiple files
|
||||
✅ **Easier to maintain** - One source of truth for base schema
|
||||
✅ **Preserves history** - Old migrations kept for existing databases
|
||||
|
||||
### Schema Evolution Timeline
|
||||
|
||||
```
|
||||
Phase 0 (Early Development)
|
||||
├── Basic users table
|
||||
├── Events and RSVPs
|
||||
└── Email verification
|
||||
|
||||
Phase 1 (Current - MVP)
|
||||
├── 000_initial_schema.sql (COMPREHENSIVE)
|
||||
│ ├── All ENUM types
|
||||
│ ├── 17 tables
|
||||
│ ├── 30+ indexes
|
||||
│ └── Default data
|
||||
├── seed_permissions_rbac.py (59 permissions, 5 roles)
|
||||
└── create_admin.py (Interactive superadmin creation)
|
||||
|
||||
Phase 2 (Future - Multi-tenant SaaS)
|
||||
├── Add tenant_id to all tables
|
||||
├── Tenant isolation middleware
|
||||
├── Per-tenant customization
|
||||
└── Tenant provisioning automation
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Database Backup & Restore
|
||||
|
||||
### Backup
|
||||
|
||||
```bash
|
||||
# Full database backup
|
||||
pg_dump -U postgres membership_db > backup_$(date +%Y%m%d).sql
|
||||
|
||||
# Compressed backup
|
||||
pg_dump -U postgres membership_db | gzip > backup_$(date +%Y%m%d).sql.gz
|
||||
|
||||
# Schema only (no data)
|
||||
pg_dump -U postgres --schema-only membership_db > schema_backup.sql
|
||||
|
||||
# Data only (no schema)
|
||||
pg_dump -U postgres --data-only membership_db > data_backup.sql
|
||||
```
|
||||
|
||||
### Restore
|
||||
|
||||
```bash
|
||||
# From uncompressed backup
|
||||
psql -U postgres -d membership_db < backup_20250118.sql
|
||||
|
||||
# From compressed backup
|
||||
gunzip -c backup_20250118.sql.gz | psql -U postgres -d membership_db
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Production Deployment Checklist
|
||||
|
||||
Before deploying to production:
|
||||
|
||||
- [ ] PostgreSQL 13+ installed
|
||||
- [ ] Database created with secure credentials
|
||||
- [ ] `000_initial_schema.sql` executed successfully
|
||||
- [ ] `seed_permissions_rbac.py` completed (59 permissions created)
|
||||
- [ ] Superadmin user created via `create_admin.py`
|
||||
- [ ] DATABASE_URL configured in backend `.env`
|
||||
- [ ] Backend server connects successfully (`uvicorn server:app`)
|
||||
- [ ] Test API endpoints: GET /api/auth/me (should work after login)
|
||||
- [ ] Database backup configured (daily cron job)
|
||||
- [ ] SSL/TLS enabled for PostgreSQL connections
|
||||
- [ ] Firewall rules restrict database access
|
||||
- [ ] Connection pooling configured (if high traffic)
|
||||
|
||||
---
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- **Backend README:** See `README.md` for complete backend setup guide
|
||||
- **API Documentation:** http://localhost:8000/docs (Swagger UI)
|
||||
- **PostgreSQL Docs:** https://www.postgresql.org/docs/13/
|
||||
- **SQLAlchemy Docs:** https://docs.sqlalchemy.org/en/20/
|
||||
|
||||
---
|
||||
|
||||
**Last Updated:** December 18, 2024
|
||||
**Version:** 1.0.0
|
||||
**Maintainer:** LOAF Development Team
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
__pycache__/wordpress_parser.cpython-312.pyc
Normal file
BIN
__pycache__/wordpress_parser.cpython-312.pyc
Normal file
Binary file not shown.
141
add_directory_permissions.py
Normal file
141
add_directory_permissions.py
Normal file
@@ -0,0 +1,141 @@
|
||||
#!/usr/bin/env python3
"""
Add Directory Permissions Script

Introduces the directory.view and directory.manage permissions and wires
them up to the admin roles, leaving every pre-existing permission intact.

Usage:
    python add_directory_permissions.py
"""

import os
import sys

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from dotenv import load_dotenv

from database import Base
from models import Permission, RolePermission, Role, UserRole

# Pull DATABASE_URL (and any other settings) from the local .env file.
load_dotenv()

# Fail fast when the connection string is missing so the script never
# runs against an unintended default database.
DATABASE_URL = os.getenv("DATABASE_URL")
if not DATABASE_URL:
    print("Error: DATABASE_URL environment variable not set")
    raise SystemExit(1)

engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# The two directory permissions this script introduces.
NEW_PERMISSIONS = [
    {
        "code": "directory.view",
        "name": "View Directory Settings",
        "description": "View member directory field configuration",
        "module": "directory",
    },
    {
        "code": "directory.manage",
        "name": "Manage Directory Fields",
        "description": "Enable/disable directory fields shown in Profile and Directory pages",
        "module": "directory",
    },
]

# Role codes that should receive each of the new permissions.
ROLE_PERMISSION_MAP = {
    "directory.view": ["admin", "superadmin"],
    "directory.manage": ["admin", "superadmin"],
}
||||
# Legacy enum fallback for the old `role` column still present on
# role_permissions (kept for backward compatibility with pre-dynamic RBAC).
_ROLE_ENUMS = {
    'guest': UserRole.guest,
    'member': UserRole.member,
    'admin': UserRole.admin,
    'superadmin': UserRole.superadmin,
    'finance': UserRole.finance,
}


def _upsert_permissions(db):
    """Create each NEW_PERMISSIONS row that is missing; return code -> Permission."""
    found = {}
    for spec in NEW_PERMISSIONS:
        row = db.query(Permission).filter(Permission.code == spec["code"]).first()
        if row is not None:
            print(f" - {spec['code']}: Already exists")
        else:
            row = Permission(
                code=spec["code"],
                name=spec["name"],
                description=spec["description"],
                module=spec["module"],
            )
            db.add(row)
            db.flush()  # populate row.id before it is referenced
            print(f" - {spec['code']}: Created")
        found[spec["code"]] = row
    return found


def _grant_permissions(db, permission_map, role_map):
    """Add any missing RolePermission link listed in ROLE_PERMISSION_MAP."""
    for perm_code, role_codes in ROLE_PERMISSION_MAP.items():
        permission = permission_map.get(perm_code)
        if not permission:
            print(f" Warning: Permission {perm_code} not found")
            continue

        for role_code in role_codes:
            role = role_map.get(role_code)
            if not role:
                print(f" Warning: Role {role_code} not found")
                continue

            # Skip links that are already in place (idempotent re-runs).
            link = (
                db.query(RolePermission)
                .filter(
                    RolePermission.role_id == role.id,
                    RolePermission.permission_id == permission.id,
                )
                .first()
            )
            if link:
                print(f" - {role_code} -> {perm_code}: Already assigned")
                continue

            db.add(
                RolePermission(
                    role=_ROLE_ENUMS.get(role_code, UserRole.guest),
                    role_id=role.id,
                    permission_id=permission.id,
                )
            )
            print(f" - {role_code} -> {perm_code}: Assigned")


def add_directory_permissions():
    """Insert the directory permissions and grant them to the mapped roles.

    Safe to re-run: existing permissions and role links are reported and
    left untouched. Commits after each phase; on any failure the session
    is rolled back, the traceback is printed, and the error is re-raised.
    """
    db = SessionLocal()

    try:
        banner = "=" * 60
        print(banner)
        print("Adding Directory Permissions")
        print(banner)

        # Phase 1: make sure every permission row exists.
        print("\n1. Adding permissions...")
        permission_map = _upsert_permissions(db)
        db.commit()

        # Phase 2: load all roles, keyed by their code.
        print("\n2. Fetching roles...")
        roles = db.query(Role).all()
        role_map = {role.code: role for role in roles}
        print(f" Found {len(roles)} roles: {', '.join(role_map.keys())}")

        # Phase 3: link roles to permissions, skipping existing links.
        print("\n3. Assigning permissions to roles...")
        _grant_permissions(db, permission_map, role_map)
        db.commit()

        print("\n" + banner)
        print("Directory permissions added successfully!")
        print(banner)

    except Exception as e:
        db.rollback()
        print(f"\nError: {str(e)}")
        import traceback
        traceback.print_exc()
        raise
    finally:
        db.close()


if __name__ == "__main__":
    add_directory_permissions()
|
||||
27
add_finance_to_enum.sql
Normal file
27
add_finance_to_enum.sql
Normal file
@@ -0,0 +1,27 @@
|
||||
-- Add the 'finance' value to the userrole ENUM type.
-- Needed because the dynamic RBAC system introduced a new finance role.
--
-- ADD VALUE IF NOT EXISTS (PostgreSQL 9.6+) replaces the previous manual
-- pg_enum existence check inside a DO block, so re-running this script is
-- a harmless no-op.
--
-- NOTE: ALTER TYPE ... ADD VALUE can only run inside a transaction block
-- on PostgreSQL 12+ (the deployment docs require PG 13+, so this is safe
-- here); on PG 12+ the new value is usable only after COMMIT.

BEGIN;

ALTER TYPE userrole ADD VALUE IF NOT EXISTS 'finance';

COMMIT;

-- Verify the enum now contains the expected values (finance included).
SELECT enumlabel
FROM pg_enum
WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = 'userrole')
ORDER BY enumsortorder;
|
||||
141
add_registration_permissions.py
Normal file
141
add_registration_permissions.py
Normal file
@@ -0,0 +1,141 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Add Registration Permissions Script
|
||||
|
||||
This script adds the new registration.view and registration.manage permissions
|
||||
without clearing existing permissions.
|
||||
|
||||
Usage:
|
||||
python add_registration_permissions.py
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from sqlalchemy import create_engine, text
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from database import Base
|
||||
from models import Permission, RolePermission, Role, UserRole
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Load environment variables
|
||||
load_dotenv()
|
||||
|
||||
# Database connection
|
||||
DATABASE_URL = os.getenv("DATABASE_URL")
|
||||
if not DATABASE_URL:
|
||||
print("Error: DATABASE_URL environment variable not set")
|
||||
sys.exit(1)
|
||||
|
||||
engine = create_engine(DATABASE_URL)
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
# New registration permissions
|
||||
NEW_PERMISSIONS = [
|
||||
{"code": "registration.view", "name": "View Registration Settings", "description": "View registration form schema and settings", "module": "registration"},
|
||||
{"code": "registration.manage", "name": "Manage Registration Form", "description": "Edit registration form schema, steps, and fields", "module": "registration"},
|
||||
]
|
||||
|
||||
# Roles that should have these permissions
|
||||
ROLE_PERMISSION_MAP = {
|
||||
"registration.view": ["admin", "superadmin"],
|
||||
"registration.manage": ["admin", "superadmin"],
|
||||
}
|
||||
|
||||
|
||||
# Legacy enum column on role_permissions, kept for backward compatibility
# with the pre-Role-table schema. Falls back to guest for unknown codes.
_ROLE_ENUM_MAP = {
    'guest': UserRole.guest,
    'member': UserRole.member,
    'admin': UserRole.admin,
    'superadmin': UserRole.superadmin,
    'finance': UserRole.finance,
}


def add_registration_permissions():
    """Add registration permissions and assign them to the appropriate roles.

    Idempotent: permissions and role mappings that already exist are left
    untouched. Rolls back and re-raises on any failure; always closes the
    session.
    """
    db = SessionLocal()

    try:
        print("=" * 60)
        print("Adding Registration Permissions")
        print("=" * 60)

        permission_map = _ensure_permissions(db)
        role_map = _fetch_roles(db)
        _assign_permissions_to_roles(db, permission_map, role_map)

        print("\n" + "=" * 60)
        print("Registration permissions added successfully!")
        print("=" * 60)

    except Exception as e:
        db.rollback()
        print(f"\nError: {str(e)}")
        import traceback
        traceback.print_exc()
        raise
    finally:
        db.close()


def _ensure_permissions(db):
    """Create each NEW_PERMISSIONS entry if missing; return code -> Permission."""
    print("\n1. Adding permissions...")
    permission_map = {}

    for perm_data in NEW_PERMISSIONS:
        existing = db.query(Permission).filter(Permission.code == perm_data["code"]).first()
        if existing:
            print(f" - {perm_data['code']}: Already exists")
            permission_map[perm_data["code"]] = existing
        else:
            permission = Permission(
                code=perm_data["code"],
                name=perm_data["name"],
                description=perm_data["description"],
                module=perm_data["module"]
            )
            db.add(permission)
            db.flush()  # Get the ID
            permission_map[perm_data["code"]] = permission
            print(f" - {perm_data['code']}: Created")

    db.commit()
    return permission_map


def _fetch_roles(db):
    """Load all roles and return them keyed by role code."""
    print("\n2. Fetching roles...")
    roles = db.query(Role).all()
    role_map = {role.code: role for role in roles}
    print(f" Found {len(roles)} roles: {', '.join(role_map.keys())}")
    return role_map


def _assign_permissions_to_roles(db, permission_map, role_map):
    """Create missing RolePermission rows per ROLE_PERMISSION_MAP; commit once."""
    print("\n3. Assigning permissions to roles...")
    for perm_code, role_codes in ROLE_PERMISSION_MAP.items():
        permission = permission_map.get(perm_code)
        if not permission:
            print(f" Warning: Permission {perm_code} not found")
            continue

        for role_code in role_codes:
            role = role_map.get(role_code)
            if not role:
                print(f" Warning: Role {role_code} not found")
                continue

            # Skip mappings that are already in place (idempotency).
            existing_mapping = db.query(RolePermission).filter(
                RolePermission.role_id == role.id,
                RolePermission.permission_id == permission.id
            ).first()

            if existing_mapping:
                print(f" - {role_code} -> {perm_code}: Already assigned")
            else:
                mapping = RolePermission(
                    role=_ROLE_ENUM_MAP.get(role_code, UserRole.guest),
                    role_id=role.id,
                    permission_id=permission.id
                )
                db.add(mapping)
                print(f" - {role_code} -> {perm_code}: Assigned")

    db.commit()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
add_registration_permissions()
|
||||
118
alembic.ini
Normal file
118
alembic.ini
Normal file
@@ -0,0 +1,118 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
# Use forward slashes (/) also on windows to provide an os agnostic path
|
||||
script_location = alembic
|
||||
|
||||
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||
# Uncomment the line below if you want the files to be prepended with date and time
|
||||
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||
# for all available tokens
|
||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the python>=3.9 or backports.zoneinfo library.
|
||||
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
|
||||
# string value is passed to ZoneInfo()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to alembic/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "version_path_separator" below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
|
||||
|
||||
# version path separator; As mentioned above, this is the character used to split
|
||||
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
||||
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
||||
# Valid values for version_path_separator are:
|
||||
#
|
||||
# version_path_separator = :
|
||||
# version_path_separator = ;
|
||||
# version_path_separator = space
|
||||
# version_path_separator = newline
|
||||
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
# new in Alembic version 1.10
|
||||
# recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
# sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||
# Database URL is configured in alembic/env.py from .env file for security
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
|
||||
# hooks = ruff
|
||||
# ruff.type = exec
|
||||
# ruff.executable = %(here)s/.venv/bin/ruff
|
||||
# ruff.options = --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARNING
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARNING
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
1
alembic/README
Normal file
1
alembic/README
Normal file
@@ -0,0 +1 @@
|
||||
Generic single-database configuration.
|
||||
259
alembic/README.md
Normal file
259
alembic/README.md
Normal file
@@ -0,0 +1,259 @@
|
||||
# Alembic Database Migrations
|
||||
|
||||
This directory contains **Alembic** database migrations for the LOAF membership platform.
|
||||
|
||||
## What is Alembic?
|
||||
|
||||
Alembic is a lightweight database migration tool for SQLAlchemy. It allows you to:
|
||||
- Track database schema changes over time
|
||||
- Apply migrations incrementally
|
||||
- Roll back changes if needed
|
||||
- Auto-generate migration scripts from model changes
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
alembic/
|
||||
├── versions/ # Migration scripts (KEEP IN VERSION CONTROL)
|
||||
│ └── *.py # Individual migration files
|
||||
├── env.py # Alembic environment configuration
|
||||
├── script.py.mako # Template for new migration files
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Create a New Migration
|
||||
|
||||
After making changes to `models.py`, generate a migration:
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
alembic revision --autogenerate -m "add_user_bio_field"
|
||||
```
|
||||
|
||||
This will create a new file in `alembic/versions/` like:
|
||||
```
|
||||
3e02c74581c9_add_user_bio_field.py
|
||||
```
|
||||
|
||||
### 2. Review the Generated Migration
|
||||
|
||||
**IMPORTANT:** Always review auto-generated migrations before applying them!
|
||||
|
||||
```bash
|
||||
# Open the latest migration file
|
||||
cat alembic/versions/3e02c74581c9_add_user_bio_field.py
|
||||
```
|
||||
|
||||
Check:
|
||||
- ✅ The `upgrade()` function contains the correct changes
|
||||
- ✅ The `downgrade()` function properly reverses those changes
|
||||
- ✅ No unintended table drops or data loss
|
||||
|
||||
### 3. Apply the Migration
|
||||
|
||||
```bash
|
||||
# Apply all pending migrations
|
||||
alembic upgrade head
|
||||
|
||||
# Or apply migrations one at a time
|
||||
alembic upgrade +1
|
||||
```
|
||||
|
||||
### 4. Rollback a Migration
|
||||
|
||||
```bash
|
||||
# Rollback the last migration
|
||||
alembic downgrade -1
|
||||
|
||||
# Rollback to a specific revision
|
||||
alembic downgrade 3e02c74581c9
|
||||
```
|
||||
|
||||
## Common Commands
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `alembic current` | Show current migration revision |
|
||||
| `alembic history` | Show migration history |
|
||||
| `alembic heads` | Show head revisions |
|
||||
| `alembic upgrade head` | Apply all pending migrations |
|
||||
| `alembic downgrade -1` | Rollback last migration |
|
||||
| `alembic revision --autogenerate -m "message"` | Create new migration |
|
||||
| `alembic stamp head` | Mark database as up-to-date without running migrations |
|
||||
|
||||
## Migration Workflow
|
||||
|
||||
### For Development
|
||||
|
||||
1. **Make changes to `models.py`**
|
||||
```python
|
||||
# In models.py
|
||||
class User(Base):
|
||||
# ...existing fields...
|
||||
bio = Column(Text, nullable=True) # New field
|
||||
```
|
||||
|
||||
2. **Generate migration**
|
||||
```bash
|
||||
alembic revision --autogenerate -m "add_user_bio_field"
|
||||
```
|
||||
|
||||
3. **Review the generated file**
|
||||
```python
|
||||
# In alembic/versions/xxxxx_add_user_bio_field.py
|
||||
def upgrade():
|
||||
op.add_column('users', sa.Column('bio', sa.Text(), nullable=True))
|
||||
|
||||
def downgrade():
|
||||
op.drop_column('users', 'bio')
|
||||
```
|
||||
|
||||
4. **Apply migration**
|
||||
```bash
|
||||
alembic upgrade head
|
||||
```
|
||||
|
||||
5. **Commit migration file to Git**
|
||||
```bash
|
||||
git add alembic/versions/xxxxx_add_user_bio_field.py
|
||||
git commit -m "Add user bio field"
|
||||
```
|
||||
|
||||
### For Production Deployment
|
||||
|
||||
**Fresh Database (New Installation):**
|
||||
```bash
|
||||
# 1. Create database
|
||||
createdb membership_db
|
||||
|
||||
# 2. Run initial schema SQL (creates all 17 tables)
|
||||
psql -U username -d membership_db -f ../migrations/000_initial_schema.sql
|
||||
|
||||
# 3. Mark database as up-to-date with Alembic
|
||||
alembic stamp head
|
||||
|
||||
# 4. Verify
|
||||
alembic current # Should show: 001_initial_baseline (head)
|
||||
```
|
||||
|
||||
**Existing Database (Apply New Migrations):**
|
||||
```bash
|
||||
# 1. Pull latest code
|
||||
git pull origin main
|
||||
|
||||
# 2. Apply migrations
|
||||
alembic upgrade head
|
||||
|
||||
# 3. Verify
|
||||
alembic current
|
||||
|
||||
# 4. Restart application
|
||||
systemctl restart membership-backend
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Database Connection
|
||||
|
||||
Alembic reads the `DATABASE_URL` from your `.env` file:
|
||||
|
||||
```env
|
||||
DATABASE_URL=postgresql://user:password@localhost:5432/membership_db
|
||||
```
|
||||
|
||||
The connection is configured in `alembic/env.py` (lines 29-36).
|
||||
|
||||
### Target Metadata
|
||||
|
||||
Alembic uses `Base.metadata` from `models.py` to detect changes:
|
||||
|
||||
```python
|
||||
# In alembic/env.py
|
||||
from models import Base
|
||||
target_metadata = Base.metadata
|
||||
```
|
||||
|
||||
## Important Notes
|
||||
|
||||
### ✅ DO:
|
||||
- Always review auto-generated migrations before applying
|
||||
- Test migrations in development before production
|
||||
- Commit migration files to version control
|
||||
- Write descriptive migration messages
|
||||
- Include both `upgrade()` and `downgrade()` functions
|
||||
|
||||
### ❌ DON'T:
|
||||
- Don't edit migration files after they've been applied in production
|
||||
- Don't delete migration files from `alembic/versions/`
|
||||
- Don't modify the `revision` or `down_revision` values
|
||||
- Don't commit `.pyc` files (already in .gitignore)
|
||||
|
||||
## Migration History
|
||||
|
||||
| Revision | Description | Date | Type |
|
||||
|----------|-------------|------|------|
|
||||
| `001_initial_baseline` | Baseline marker (empty migration) | 2026-01-02 | Baseline |
|
||||
|
||||
**Note:** The actual initial schema is created by running `backend/migrations/000_initial_schema.sql`. The baseline migration is an empty marker that indicates the starting point for Alembic tracking.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "Target database is not up to date"
|
||||
|
||||
```bash
|
||||
# Check current revision
|
||||
alembic current
|
||||
|
||||
# Check pending migrations
|
||||
alembic history
|
||||
|
||||
# Apply missing migrations
|
||||
alembic upgrade head
|
||||
```
|
||||
|
||||
### "FAILED: Can't locate revision identified by 'xxxxx'"
|
||||
|
||||
The database thinks it's at a revision that doesn't exist in your `alembic/versions/`.
|
||||
|
||||
**Solution:**
|
||||
```bash
|
||||
# Mark database at a known good revision
|
||||
alembic stamp head
|
||||
```
|
||||
|
||||
### Migration conflicts
|
||||
|
||||
If you get merge conflicts in migration files:
|
||||
|
||||
1. Resolve conflicts in the migration file
|
||||
2. Ensure `revision` and `down_revision` chain is correct
|
||||
3. Test the migration locally
|
||||
|
||||
### Fresh database setup
|
||||
|
||||
For a completely new database:
|
||||
|
||||
```bash
|
||||
# Step 1: Run initial schema SQL
|
||||
psql -U username -d membership_db -f ../migrations/000_initial_schema.sql
|
||||
|
||||
# Step 2: Mark as up-to-date
|
||||
alembic stamp head
|
||||
|
||||
# Step 3: Verify
|
||||
alembic current # Should show: 001_initial_baseline (head)
|
||||
```
|
||||
|
||||
## Legacy Migrations
|
||||
|
||||
Old numbered SQL migrations (`000_initial_schema.sql` through `011_wordpress_import_enhancements.sql`) are preserved in `backend/migrations/` for reference. These have been consolidated into the initial Alembic migration.
|
||||
|
||||
**Going forward, all new migrations must use Alembic.**
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [Alembic Documentation](https://alembic.sqlalchemy.org/)
|
||||
- [SQLAlchemy Documentation](https://docs.sqlalchemy.org/)
|
||||
- [PostgreSQL Documentation](https://www.postgresql.org/docs/)
|
||||
96
alembic/env.py
Normal file
96
alembic/env.py
Normal file
@@ -0,0 +1,96 @@
|
||||
import os
import sys
from logging.config import fileConfig

from sqlalchemy import engine_from_config, pool
from alembic import context
from dotenv import load_dotenv

# Add the parent directory (the backend package root) to sys.path so that
# `models` and `database` can be imported when alembic runs from anywhere.
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))

# Load environment variables from .env file
load_dotenv()

# Import all models so Alembic's autogenerate can detect them
from models import Base
import models  # This ensures all models are imported

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set the SQLAlchemy URL from the environment variable — it is deliberately
# kept out of alembic.ini so credentials are never committed.
database_url = os.getenv("DATABASE_URL")
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)
else:
    raise ValueError(
        "DATABASE_URL environment variable not set. "
        "Please create a .env file with DATABASE_URL=postgresql://user:password@host:port/dbname"
    )

# Add your model's MetaData object here for 'autogenerate' support
target_metadata = Base.metadata
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode, emitting SQL to the script output.

    The context is configured with only a database URL — no Engine — so no
    DBAPI needs to be available. Calls to context.execute() write their SQL
    as text rather than executing against a live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        compare_type=True,  # detect column type changes
        compare_server_default=True,  # detect server default changes
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode over a live database connection.

    Builds an Engine from the [alembic] ini section and associates one
    connection with the migration context.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    connectable = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # one-shot connection; no pooling needed
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,  # detect column type changes
            compare_server_default=True,  # detect server default changes
        )
        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Dispatch on Alembic's invocation mode: offline emits SQL text,
# online applies migrations over a live connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
26
alembic/script.py.mako
Normal file
26
alembic/script.py.mako
Normal file
@@ -0,0 +1,26 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
59
alembic/versions/001_initial_baseline.py
Normal file
59
alembic/versions/001_initial_baseline.py
Normal file
@@ -0,0 +1,59 @@
|
||||
"""initial_baseline - Use 000_initial_schema.sql for fresh deployments
|
||||
|
||||
Revision ID: 001_initial_baseline
|
||||
Revises:
|
||||
Create Date: 2026-01-02 16:45:00.000000
|
||||
|
||||
IMPORTANT: This is a baseline migration for existing databases.
|
||||
|
||||
For FRESH deployments:
|
||||
1. Run: psql -U user -d dbname -f backend/migrations/000_initial_schema.sql
|
||||
2. Run: alembic stamp head
|
||||
|
||||
For EXISTING deployments (already have database):
|
||||
1. Run: alembic stamp head (marks database as up-to-date)
|
||||
|
||||
This migration intentionally does NOTHING because:
|
||||
- Fresh deployments use 000_initial_schema.sql to create all tables
|
||||
- Existing deployments already have all tables from 000_initial_schema.sql
|
||||
- Future migrations will be incremental changes from this baseline
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
revision: str = '001_initial_baseline'
# First revision in the chain, hence no parent.
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Intentional no-op: baseline marker only.

    It records that the schema already exists in full:
    - All 17 tables (users, events, subscriptions, etc.)
    - All 8 enums (UserStatus, UserRole, etc.)
    - All indexes and constraints

    The actual schema is created by running:
        backend/migrations/000_initial_schema.sql
    """
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Intentional no-op: there is nothing below the baseline to revert to.

    To completely reset the database instead:
        1. dropdb dbname
        2. createdb dbname
        3. psql -U user -d dbname -f backend/migrations/000_initial_schema.sql
        4. alembic stamp head
    """
|
||||
92
alembic/versions/002_add_missing_user_fields.py
Normal file
92
alembic/versions/002_add_missing_user_fields.py
Normal file
@@ -0,0 +1,92 @@
|
||||
"""add_missing_user_fields
|
||||
|
||||
Revision ID: 002_add_missing_user_fields
|
||||
Revises: 001_initial_baseline
|
||||
Create Date: 2026-01-04
|
||||
|
||||
Adds missing user fields to sync models.py with database:
|
||||
- scholarship_reason
|
||||
- directory_* fields (email, bio, address, phone, dob, partner_name)
|
||||
- profile_photo_url (rename from profile_image_url)
|
||||
- social_media_* fields (facebook, instagram, twitter, linkedin)
|
||||
- email_verification_expires
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
revision: str = '002_add_missing_user_fields'
# Applies directly after the empty baseline marker.
down_revision: Union[str, None] = '001_initial_baseline'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add missing user fields, skipping any column that already exists.

    The column inventory is snapshotted once up front, so the migration is
    safe to run against databases that already carry some of these fields.
    """
    from sqlalchemy import inspect

    inspector = inspect(op.get_bind())
    present = {col['name'] for col in inspector.get_columns('users')}

    def _add_if_missing(name, column_type):
        # All new columns are nullable so existing rows need no backfill.
        if name not in present:
            op.add_column('users', sa.Column(name, column_type, nullable=True))

    # Scholarship + directory fields
    _add_if_missing('scholarship_reason', sa.Text())
    _add_if_missing('directory_email', sa.String())
    _add_if_missing('directory_bio', sa.Text())
    _add_if_missing('directory_address', sa.String())
    _add_if_missing('directory_phone', sa.String())
    _add_if_missing('directory_dob', sa.DateTime())
    _add_if_missing('directory_partner_name', sa.String())

    # Rename profile_image_url to profile_photo_url (skip if already renamed)
    if 'profile_image_url' in present and 'profile_photo_url' not in present:
        op.alter_column('users', 'profile_image_url', new_column_name='profile_photo_url')

    # Social media fields
    for network in ('facebook', 'instagram', 'twitter', 'linkedin'):
        _add_if_missing(f'social_media_{network}', sa.String())

    # Email verification expiry
    _add_if_missing('email_verification_expires', sa.DateTime())
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Remove the fields added by upgrade() (rollback).

    Fix: also drops email_verification_expires, which upgrade() adds but the
    original downgrade omitted, leaving the rollback incomplete.

    NOTE(review): upgrade() only adds columns that were missing, so in theory
    a downgrade could drop a column that predated this migration; that is
    already true of every other drop below, so the behavior is consistent.
    """
    # Remove email_verification_expires (added conditionally in upgrade())
    op.drop_column('users', 'email_verification_expires')

    # Remove social media fields
    op.drop_column('users', 'social_media_linkedin')
    op.drop_column('users', 'social_media_twitter')
    op.drop_column('users', 'social_media_instagram')
    op.drop_column('users', 'social_media_facebook')

    # Rename profile_photo_url back to profile_image_url
    op.alter_column('users', 'profile_photo_url', new_column_name='profile_image_url')

    # Remove directory fields
    op.drop_column('users', 'directory_partner_name')
    op.drop_column('users', 'directory_dob')
    op.drop_column('users', 'directory_phone')
    op.drop_column('users', 'directory_address')
    op.drop_column('users', 'directory_bio')
    op.drop_column('users', 'directory_email')

    # Remove scholarship_reason
    op.drop_column('users', 'scholarship_reason')
|
||||
50
alembic/versions/003_add_user_invitation_fields.py
Normal file
50
alembic/versions/003_add_user_invitation_fields.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""add_user_invitation_fields
|
||||
|
||||
Revision ID: 003_add_user_invitation_fields
|
||||
Revises: 002_add_missing_user_fields
|
||||
Create Date: 2026-01-04
|
||||
|
||||
Adds optional pre-filled fields to user_invitations table:
|
||||
- first_name
|
||||
- last_name
|
||||
- phone
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
revision: str = '003_add_user_invitation_fields'
# Follows the user-field sync migration in a linear chain.
down_revision: Union[str, None] = '002_add_missing_user_fields'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add optional pre-filled fields to user_invitations, skipping existing ones."""
    from sqlalchemy import inspect

    inspector = inspect(op.get_bind())
    present = {col['name'] for col in inspector.get_columns('user_invitations')}

    # All three fields are optional string columns; add only what is missing.
    for field in ('first_name', 'last_name', 'phone'):
        if field not in present:
            op.add_column('user_invitations', sa.Column(field, sa.String(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the pre-filled invitation fields added by upgrade() (rollback)."""
    # Drop in reverse order of creation.
    for field in ('phone', 'last_name', 'first_name'):
        op.drop_column('user_invitations', field)
|
||||
52
alembic/versions/004_add_document_file_sizes.py
Normal file
52
alembic/versions/004_add_document_file_sizes.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""add_document_file_sizes
|
||||
|
||||
Revision ID: 004_add_document_file_sizes
|
||||
Revises: 003_add_user_invitation_fields
|
||||
Create Date: 2026-01-04
|
||||
|
||||
Adds file_size_bytes to all document tables:
|
||||
- newsletter_archives
|
||||
- financial_reports
|
||||
- bylaws_documents
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
revision: str = '004_add_document_file_sizes'
# Follows the invitation-fields migration in a linear chain.
down_revision: Union[str, None] = '003_add_user_invitation_fields'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add a file_size_bytes column to each document table that lacks it."""
    from sqlalchemy import inspect

    inspector = inspect(op.get_bind())

    # Same column spec for every document-bearing table.
    for table in ('newsletter_archives', 'financial_reports', 'bylaws_documents'):
        columns = {col['name'] for col in inspector.get_columns(table)}
        if 'file_size_bytes' not in columns:
            op.add_column(table, sa.Column('file_size_bytes', sa.Integer(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the file_size_bytes columns added by upgrade() (rollback)."""
    # Drop in reverse order of creation.
    for table in ('bylaws_documents', 'financial_reports', 'newsletter_archives'):
        op.drop_column(table, 'file_size_bytes')
|
||||
81
alembic/versions/005_fix_subscriptions_and_storage.py
Normal file
81
alembic/versions/005_fix_subscriptions_and_storage.py
Normal file
@@ -0,0 +1,81 @@
|
||||
"""fix_subscriptions_and_storage
|
||||
|
||||
Revision ID: 005_fix_subs_storage
|
||||
Revises: 004_add_document_file_sizes
|
||||
Create Date: 2026-01-04
|
||||
|
||||
Fixes:
|
||||
- Add missing columns to subscriptions table
|
||||
- Rename storage_usage.last_calculated_at to last_updated
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
# Note: the id is intentionally shorter than the filename
# (005_fix_subscriptions_and_storage.py).
revision: str = '005_fix_subs_storage'
down_revision: Union[str, None] = '004_add_document_file_sizes'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add missing subscriptions columns and rename a storage_usage column.

    Every change is guarded by an inspection of the live schema, so the
    migration is safe to re-run against a partially patched database.
    """
    from sqlalchemy import inspect

    inspector = inspect(op.get_bind())

    # Columns the subscriptions table should carry; added only when absent.
    present = {col['name'] for col in inspector.get_columns('subscriptions')}
    wanted = [
        sa.Column('start_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('end_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('amount_paid_cents', sa.Integer(), nullable=True),
        sa.Column('manual_payment_notes', sa.Text(), nullable=True),
        sa.Column('manual_payment_admin_id', UUID(as_uuid=True), nullable=True),
        sa.Column('manual_payment_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('payment_method', sa.String(50), nullable=True),
    ]
    for column in wanted:
        if column.name not in present:
            op.add_column('subscriptions', column)

    # Link manual_payment_admin_id to users, unless the FK already exists.
    fk_names = [fk['name'] for fk in inspector.get_foreign_keys('subscriptions')]
    if 'subscriptions_manual_payment_admin_id_fkey' not in fk_names:
        op.create_foreign_key(
            'subscriptions_manual_payment_admin_id_fkey',
            'subscriptions', 'users',
            ['manual_payment_admin_id'], ['id']
        )

    # storage_usage: last_calculated_at -> last_updated (only when still needed).
    storage_cols = {col['name'] for col in inspector.get_columns('storage_usage')}
    if 'last_calculated_at' in storage_cols and 'last_updated' not in storage_cols:
        op.alter_column('storage_usage', 'last_calculated_at', new_column_name='last_updated')
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Undo migration 005: restore the old column name and drop added columns."""
    # Put the storage_usage column name back first.
    op.alter_column('storage_usage', 'last_updated', new_column_name='last_calculated_at')

    # The FK must go before its column can be dropped.
    op.drop_constraint('subscriptions_manual_payment_admin_id_fkey', 'subscriptions', type_='foreignkey')

    # Remove the subscription columns in reverse creation order.
    for name in (
        'payment_method',
        'manual_payment_date',
        'manual_payment_admin_id',
        'manual_payment_notes',
        'amount_paid_cents',
        'end_date',
        'start_date',
    ):
        op.drop_column('subscriptions', name)
|
||||
37
alembic/versions/006_rename_is_active.py
Normal file
37
alembic/versions/006_rename_is_active.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""rename_is_active
|
||||
|
||||
Revision ID: 006_rename_active
|
||||
Revises: 005_fix_subs_storage
|
||||
Create Date: 2026-01-04
|
||||
|
||||
Fixes:
|
||||
- Rename subscription_plans.is_active to active (match models.py)
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '006_rename_active'
|
||||
down_revision: Union[str, None] = '005_fix_subs_storage'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Rename subscription_plans.is_active to active, skipping if already done."""
    from sqlalchemy import inspect

    cols = {col['name'] for col in inspect(op.get_bind()).get_columns('subscription_plans')}
    # Rename only when the old name is still present and the new one is not,
    # so a re-run (or an already-aligned database) is a no-op.
    if 'is_active' in cols and 'active' not in cols:
        op.alter_column('subscription_plans', 'is_active', new_column_name='active')
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Restore the original ``is_active`` column name on subscription_plans."""
    op.alter_column('subscription_plans', 'active', new_column_name='is_active')
|
||||
65
alembic/versions/007_add_subscription_plan_fields.py
Normal file
65
alembic/versions/007_add_subscription_plan_fields.py
Normal file
@@ -0,0 +1,65 @@
|
||||
"""add_subscription_plan_fields
|
||||
|
||||
Revision ID: 007_add_sub_fields
|
||||
Revises: 006_rename_active
|
||||
Create Date: 2026-01-04
|
||||
|
||||
Fixes:
|
||||
- Add missing columns to subscription_plans table
|
||||
(custom cycle fields, dynamic pricing fields)
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '007_add_sub_fields'
|
||||
down_revision: Union[str, None] = '006_rename_active'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add custom-cycle and dynamic-pricing columns to subscription_plans.

    Each column is added only when it is not already present, so the
    migration is idempotent against partially patched databases.

    Fix: the original body re-imported ``op`` and ``sa`` locally even though
    both are already imported at module level; those redundant, shadowing
    imports are removed.
    """
    from sqlalchemy import inspect

    # Get database connection
    conn = op.get_bind()
    inspector = inspect(conn)
    existing_columns = {col['name'] for col in inspector.get_columns('subscription_plans')}

    # Custom billing cycle fields
    if 'custom_cycle_enabled' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('custom_cycle_enabled', sa.Boolean(), nullable=False, server_default='false'))
    if 'custom_cycle_start_month' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('custom_cycle_start_month', sa.Integer(), nullable=True))
    if 'custom_cycle_start_day' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('custom_cycle_start_day', sa.Integer(), nullable=True))
    if 'custom_cycle_end_month' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('custom_cycle_end_month', sa.Integer(), nullable=True))
    if 'custom_cycle_end_day' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('custom_cycle_end_day', sa.Integer(), nullable=True))

    # Dynamic pricing fields
    if 'minimum_price_cents' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('minimum_price_cents', sa.Integer(), nullable=False, server_default='3000'))
    if 'suggested_price_cents' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('suggested_price_cents', sa.Integer(), nullable=True))
    if 'allow_donation' not in existing_columns:
        op.add_column('subscription_plans', sa.Column('allow_donation', sa.Boolean(), nullable=False, server_default='true'))
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the custom-cycle and dynamic-pricing columns (rollback)."""
    # Reverse order of the upgrade's additions.
    for name in (
        'allow_donation',
        'suggested_price_cents',
        'minimum_price_cents',
        'custom_cycle_end_day',
        'custom_cycle_end_month',
        'custom_cycle_start_day',
        'custom_cycle_start_month',
        'custom_cycle_enabled',
    ):
        op.drop_column('subscription_plans', name)
|
||||
55
alembic/versions/008_add_donation_columns.py
Normal file
55
alembic/versions/008_add_donation_columns.py
Normal file
@@ -0,0 +1,55 @@
|
||||
"""add_donation_columns
|
||||
|
||||
Revision ID: 008_add_donations
|
||||
Revises: 007_add_sub_fields
|
||||
Create Date: 2026-01-04
|
||||
|
||||
Fixes:
|
||||
- Add missing Stripe payment columns to donations table
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import inspect
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '008_add_donations'
|
||||
down_revision: Union[str, None] = '007_add_sub_fields'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add Stripe payment bookkeeping columns to donations (idempotent).

    Each column is created only when the live schema does not already
    contain it, so re-running this migration is harmless.
    """
    # Get database connection
    inspector = inspect(op.get_bind())
    present = {col['name'] for col in inspector.get_columns('donations')}

    # Stripe payment columns, added only when absent.
    wanted = [
        sa.Column('stripe_checkout_session_id', sa.String(), nullable=True),
        sa.Column('stripe_payment_intent_id', sa.String(), nullable=True),
        sa.Column('payment_method', sa.String(), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    ]
    for column in wanted:
        if column.name not in present:
            op.add_column('donations', column)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the Stripe payment columns from donations (rollback)."""
    # Reverse order of the upgrade's additions.
    for name in (
        'updated_at',
        'notes',
        'payment_method',
        'stripe_payment_intent_id',
        'stripe_checkout_session_id',
    ):
        op.drop_column('donations', name)
|
||||
237
alembic/versions/009_add_all_missing_columns.py
Normal file
237
alembic/versions/009_add_all_missing_columns.py
Normal file
@@ -0,0 +1,237 @@
|
||||
"""add_all_missing_columns
|
||||
|
||||
Revision ID: 009_add_all_missing
|
||||
Revises: 008_add_donations
|
||||
Create Date: 2026-01-04
|
||||
|
||||
Fixes:
|
||||
- Add ALL remaining missing columns across all tables
|
||||
- Users: newsletter preferences, volunteer, scholarship, directory, password reset, ToS, member_since, reminders, rejection, import tracking
|
||||
- Events: calendar_uid
|
||||
- Subscriptions: base_subscription_cents, donation_cents, manual_payment
|
||||
- ImportJobs: WordPress import fields
|
||||
- Create ImportRollbackAudit table if not exists
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
from sqlalchemy import inspect
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '009_add_all_missing'
|
||||
down_revision: Union[str, None] = '008_add_donations'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add all remaining missing columns across users/events/subscriptions/import_jobs.

    Also creates the import_rollback_audit table when absent. Every change is
    guarded against the live schema, so the migration is idempotent.

    Refactor: the ~35 repeated ``if name not in cols: op.add_column(...)``
    checks are collapsed into the private ``_add_missing`` helper; the set of
    columns, their types/defaults, and the order of additions are unchanged.
    """
    conn = op.get_bind()
    inspector = inspect(conn)

    def _add_missing(table, columns):
        # Add each sa.Column to *table* unless a column of that name exists.
        existing = {col['name'] for col in inspector.get_columns(table)}
        for column in columns:
            if column.name not in existing:
                op.add_column(table, column)

    # ============================================================
    # 1. USERS TABLE - Add ~28 missing columns
    # ============================================================
    _add_missing('users', [
        # Newsletter publication preferences
        sa.Column('newsletter_publish_name', sa.Boolean(), nullable=False, server_default='false'),
        sa.Column('newsletter_publish_photo', sa.Boolean(), nullable=False, server_default='false'),
        sa.Column('newsletter_publish_birthday', sa.Boolean(), nullable=False, server_default='false'),
        sa.Column('newsletter_publish_none', sa.Boolean(), nullable=False, server_default='false'),
        # Volunteer interests
        sa.Column('volunteer_interests', sa.JSON(), nullable=True, server_default='[]'),
        # Scholarship
        sa.Column('scholarship_requested', sa.Boolean(), nullable=False, server_default='false'),
        # Directory
        sa.Column('show_in_directory', sa.Boolean(), nullable=False, server_default='false'),
        # Password reset
        sa.Column('password_reset_token', sa.String(), nullable=True),
        sa.Column('password_reset_expires', sa.DateTime(), nullable=True),
        sa.Column('force_password_change', sa.Boolean(), nullable=False, server_default='false'),
        # Terms of Service
        sa.Column('accepts_tos', sa.Boolean(), nullable=False, server_default='false'),
        sa.Column('tos_accepted_at', sa.DateTime(), nullable=True),
        # Member since
        sa.Column('member_since', sa.DateTime(), nullable=True),
        # Email verification reminders
        sa.Column('email_verification_reminders_sent', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('last_email_verification_reminder_at', sa.DateTime(), nullable=True),
        # Event attendance reminders
        sa.Column('event_attendance_reminders_sent', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('last_event_attendance_reminder_at', sa.DateTime(), nullable=True),
        # Payment reminders
        sa.Column('payment_reminders_sent', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('last_payment_reminder_at', sa.DateTime(), nullable=True),
        # Renewal reminders
        sa.Column('renewal_reminders_sent', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('last_renewal_reminder_at', sa.DateTime(), nullable=True),
        # Rejection tracking
        # Note: FK constraint for rejected_by skipped to avoid circular dependency issues
        sa.Column('rejection_reason', sa.Text(), nullable=True),
        sa.Column('rejected_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('rejected_by', UUID(as_uuid=True), nullable=True),
        # WordPress import tracking
        # Note: FK for import_job_id will be added after import_jobs table is updated
        sa.Column('import_source', sa.String(50), nullable=True),
        sa.Column('import_job_id', UUID(as_uuid=True), nullable=True),
        sa.Column('wordpress_user_id', sa.BigInteger(), nullable=True),
        sa.Column('wordpress_registered_date', sa.DateTime(timezone=True), nullable=True),
    ])

    # ============================================================
    # 2. EVENTS TABLE - Add calendar_uid
    # ============================================================
    _add_missing('events', [sa.Column('calendar_uid', sa.String(), nullable=True)])

    # ============================================================
    # 3. SUBSCRIPTIONS TABLE - Add donation tracking
    # ============================================================
    # Handled explicitly (not via _add_missing) because base_subscription_cents
    # needs a backfill step between add and NOT NULL, and the inspector caches
    # per-table column results.
    subscriptions_columns = {col['name'] for col in inspector.get_columns('subscriptions')}

    if 'base_subscription_cents' not in subscriptions_columns:
        # Added nullable first so existing rows can be backfilled, then tightened.
        op.add_column('subscriptions', sa.Column('base_subscription_cents', sa.Integer(), nullable=True))
        op.execute("UPDATE subscriptions SET base_subscription_cents = COALESCE(amount_paid_cents, 0) WHERE base_subscription_cents IS NULL")
        op.alter_column('subscriptions', 'base_subscription_cents', nullable=False)

    if 'donation_cents' not in subscriptions_columns:
        op.add_column('subscriptions', sa.Column('donation_cents', sa.Integer(), nullable=False, server_default='0'))

    if 'manual_payment' not in subscriptions_columns:
        op.add_column('subscriptions', sa.Column('manual_payment', sa.Boolean(), nullable=False, server_default='false'))

    # ============================================================
    # 4. IMPORT_JOBS TABLE - Add WordPress import fields
    # ============================================================
    _add_missing('import_jobs', [
        sa.Column('field_mapping', sa.JSON(), nullable=False, server_default='{}'),
        sa.Column('wordpress_metadata', sa.JSON(), nullable=False, server_default='{}'),
        sa.Column('imported_user_ids', sa.JSON(), nullable=False, server_default='[]'),
        sa.Column('rollback_at', sa.DateTime(), nullable=True),
        # Foreign key for rollback_by will be added if needed
        sa.Column('rollback_by', UUID(as_uuid=True), nullable=True),
    ])

    # ============================================================
    # 5. CREATE IMPORT_ROLLBACK_AUDIT TABLE
    # ============================================================
    if 'import_rollback_audit' not in inspector.get_table_names():
        op.create_table(
            'import_rollback_audit',
            sa.Column('id', UUID(as_uuid=True), primary_key=True),
            sa.Column('import_job_id', UUID(as_uuid=True), sa.ForeignKey('import_jobs.id'), nullable=False),
            sa.Column('rolled_back_by', UUID(as_uuid=True), sa.ForeignKey('users.id'), nullable=False),
            sa.Column('rolled_back_at', sa.DateTime(), nullable=False),
            sa.Column('deleted_user_count', sa.Integer(), nullable=False),
            sa.Column('deleted_user_ids', sa.JSON(), nullable=False),
            sa.Column('reason', sa.Text(), nullable=True),
            sa.Column('created_at', sa.DateTime(), nullable=False)
        )
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Remove everything migration 009 added: audit table first, then columns."""
    # Drop the audit table before its referenced columns go away.
    op.drop_table('import_rollback_audit')

    # import_jobs columns, reverse creation order.
    for name in ('rollback_by', 'rollback_at', 'imported_user_ids',
                 'wordpress_metadata', 'field_mapping'):
        op.drop_column('import_jobs', name)

    # subscriptions donation-tracking columns.
    for name in ('manual_payment', 'donation_cents', 'base_subscription_cents'):
        op.drop_column('subscriptions', name)

    # events column.
    op.drop_column('events', 'calendar_uid')

    # users columns, reverse creation order.
    for name in (
        'wordpress_registered_date', 'wordpress_user_id', 'import_job_id', 'import_source',
        'rejected_by', 'rejected_at', 'rejection_reason',
        'last_renewal_reminder_at', 'renewal_reminders_sent',
        'last_payment_reminder_at', 'payment_reminders_sent',
        'last_event_attendance_reminder_at', 'event_attendance_reminders_sent',
        'last_email_verification_reminder_at', 'email_verification_reminders_sent',
        'member_since', 'tos_accepted_at', 'accepts_tos',
        'force_password_change', 'password_reset_expires', 'password_reset_token',
        'show_in_directory', 'scholarship_requested', 'volunteer_interests',
        'newsletter_publish_none', 'newsletter_publish_birthday',
        'newsletter_publish_photo', 'newsletter_publish_name',
    ):
        op.drop_column('users', name)
|
||||
37
alembic/versions/010_add_email_verification_expires.py
Normal file
37
alembic/versions/010_add_email_verification_expires.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""add_email_verification_expires
|
||||
|
||||
Revision ID: 010_add_email_exp
|
||||
Revises: 009_add_all_missing
|
||||
Create Date: 2026-01-05
|
||||
|
||||
Fixes:
|
||||
- Add missing email_verification_expires column to users table
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '010_add_email_exp'
|
||||
down_revision: Union[str, None] = '009_add_all_missing'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add users.email_verification_expires unless it already exists."""
    from sqlalchemy import inspect

    user_cols = {col['name'] for col in inspect(op.get_bind()).get_columns('users')}
    # Idempotent: skip databases already carrying the column.
    if 'email_verification_expires' not in user_cols:
        op.add_column('users', sa.Column('email_verification_expires', sa.DateTime(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the email_verification_expires column from users (rollback)."""
    op.drop_column('users', 'email_verification_expires')
|
||||
410
alembic/versions/011_align_prod_with_dev.py
Normal file
410
alembic/versions/011_align_prod_with_dev.py
Normal file
@@ -0,0 +1,410 @@
|
||||
"""align_prod_with_dev
|
||||
|
||||
Revision ID: 011_align_prod_dev
|
||||
Revises: 010_add_email_exp
|
||||
Create Date: 2026-01-05
|
||||
|
||||
Aligns PROD database schema with DEV database schema (source of truth).
|
||||
Fixes type mismatches, removes PROD-only columns, adds DEV-only columns, updates nullable constraints.
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import JSONB, JSON
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '011_align_prod_dev'
|
||||
down_revision: Union[str, None] = '010_add_email_exp'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Align PROD schema with DEV schema (source of truth).

    One-way, data-destructive alignment across 14 tables: drops PROD-only
    columns, widens VARCHAR(n) to unconstrained VARCHAR, converts
    DATE->TIMESTAMP and JSONB->JSON, backfills NULLs with sentinel values,
    then tightens NOT NULL constraints. Statement order matters: backfills
    must run before the NOT NULL changes they enable.

    NOTE(review): the try/except blocks print a warning and keep going; on
    PostgreSQL a failed DDL statement aborts the enclosing transaction, so
    every later statement would then also fail — confirm this runs with
    per-statement commits/autocommit.
    NOTE(review): `inspector` is created once and queried per table before
    that table is modified; presumably reflection is fresh per
    get_columns() call — verify against the SQLAlchemy version in use.
    """
    from sqlalchemy import inspect

    conn = op.get_bind()
    inspector = inspect(conn)

    print("Starting schema alignment: PROD → DEV (source of truth)...")

    # ============================================================
    # 1. FIX USERS TABLE
    # ============================================================
    print("\n[1/14] Fixing users table...")

    users_columns = {col['name'] for col in inspector.get_columns('users')}

    # Remove PROD-only columns (not in models.py or DEV)
    if 'bio' in users_columns:
        op.drop_column('users', 'bio')
        print(" ✓ Removed users.bio (PROD-only)")

    if 'interests' in users_columns:
        op.drop_column('users', 'interests')
        print(" ✓ Removed users.interests (PROD-only)")

    try:
        # Change constrained VARCHAR(n) to unconstrained VARCHAR
        op.alter_column('users', 'first_name', type_=sa.String(), postgresql_using='first_name::varchar')
        op.alter_column('users', 'last_name', type_=sa.String(), postgresql_using='last_name::varchar')
        op.alter_column('users', 'email', type_=sa.String(), postgresql_using='email::varchar')
        op.alter_column('users', 'phone', type_=sa.String(), postgresql_using='phone::varchar')
        op.alter_column('users', 'city', type_=sa.String(), postgresql_using='city::varchar')
        op.alter_column('users', 'state', type_=sa.String(), postgresql_using='state::varchar')
        op.alter_column('users', 'zipcode', type_=sa.String(), postgresql_using='zipcode::varchar')
        op.alter_column('users', 'partner_first_name', type_=sa.String(), postgresql_using='partner_first_name::varchar')
        op.alter_column('users', 'partner_last_name', type_=sa.String(), postgresql_using='partner_last_name::varchar')
        op.alter_column('users', 'referred_by_member_name', type_=sa.String(), postgresql_using='referred_by_member_name::varchar')
        op.alter_column('users', 'password_hash', type_=sa.String(), postgresql_using='password_hash::varchar')
        op.alter_column('users', 'email_verification_token', type_=sa.String(), postgresql_using='email_verification_token::varchar')
        op.alter_column('users', 'password_reset_token', type_=sa.String(), postgresql_using='password_reset_token::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")

        # Change TEXT to VARCHAR
        op.alter_column('users', 'address', type_=sa.String(), postgresql_using='address::varchar')
        op.alter_column('users', 'profile_photo_url', type_=sa.String(), postgresql_using='profile_photo_url::varchar')
        print(" ✓ Changed TEXT to VARCHAR")

        # Change DATE to TIMESTAMP
        op.alter_column('users', 'date_of_birth', type_=sa.DateTime(), postgresql_using='date_of_birth::timestamp')
        op.alter_column('users', 'member_since', type_=sa.DateTime(), postgresql_using='member_since::timestamp')
        print(" ✓ Changed DATE to TIMESTAMP")

        # Change JSONB to JSON
        op.alter_column('users', 'lead_sources', type_=JSON(), postgresql_using='lead_sources::json')
        print(" ✓ Changed lead_sources JSONB to JSON")

        # Change TEXT to JSON for volunteer_interests
        op.alter_column('users', 'volunteer_interests', type_=JSON(), postgresql_using='volunteer_interests::json')
        print(" ✓ Changed volunteer_interests TEXT to JSON")

    except Exception as e:
        print(f" ⚠️ Warning: Some type conversions failed: {e}")

    # Fill NULL values with defaults BEFORE setting NOT NULL constraints
    print(" ⏳ Filling NULL values with defaults...")

    # Update string fields
    conn.execute(sa.text("UPDATE users SET address = '' WHERE address IS NULL"))
    conn.execute(sa.text("UPDATE users SET city = '' WHERE city IS NULL"))
    conn.execute(sa.text("UPDATE users SET state = '' WHERE state IS NULL"))
    conn.execute(sa.text("UPDATE users SET zipcode = '' WHERE zipcode IS NULL"))
    conn.execute(sa.text("UPDATE users SET phone = '' WHERE phone IS NULL"))

    # Update date_of_birth with sentinel date
    # NOTE(review): '1900-01-01' is a sentinel that will look like a real
    # birthday to any consumer of this column — confirm downstream code
    # treats it as "unknown".
    conn.execute(sa.text("UPDATE users SET date_of_birth = '1900-01-01'::timestamp WHERE date_of_birth IS NULL"))

    # Update boolean fields
    conn.execute(sa.text("UPDATE users SET show_in_directory = false WHERE show_in_directory IS NULL"))
    conn.execute(sa.text("UPDATE users SET newsletter_publish_name = false WHERE newsletter_publish_name IS NULL"))
    conn.execute(sa.text("UPDATE users SET newsletter_publish_birthday = false WHERE newsletter_publish_birthday IS NULL"))
    conn.execute(sa.text("UPDATE users SET newsletter_publish_photo = false WHERE newsletter_publish_photo IS NULL"))
    conn.execute(sa.text("UPDATE users SET newsletter_publish_none = false WHERE newsletter_publish_none IS NULL"))
    conn.execute(sa.text("UPDATE users SET force_password_change = false WHERE force_password_change IS NULL"))
    conn.execute(sa.text("UPDATE users SET scholarship_requested = false WHERE scholarship_requested IS NULL"))
    conn.execute(sa.text("UPDATE users SET accepts_tos = false WHERE accepts_tos IS NULL"))

    # Check how many rows were updated
    null_check = conn.execute(sa.text("""
        SELECT
            COUNT(*) FILTER (WHERE address = '') as address_filled,
            COUNT(*) FILTER (WHERE date_of_birth = '1900-01-01'::timestamp) as dob_filled
        FROM users
    """)).fetchone()
    print(f" ✓ Filled NULLs: {null_check[0]} addresses, {null_check[1]} dates of birth")

    # Now safe to set NOT NULL constraints
    op.alter_column('users', 'address', nullable=False)
    op.alter_column('users', 'city', nullable=False)
    op.alter_column('users', 'state', nullable=False)
    op.alter_column('users', 'zipcode', nullable=False)
    op.alter_column('users', 'phone', nullable=False)
    op.alter_column('users', 'date_of_birth', nullable=False)
    op.alter_column('users', 'show_in_directory', nullable=False)
    op.alter_column('users', 'newsletter_publish_name', nullable=False)
    op.alter_column('users', 'newsletter_publish_birthday', nullable=False)
    op.alter_column('users', 'newsletter_publish_photo', nullable=False)
    op.alter_column('users', 'newsletter_publish_none', nullable=False)
    op.alter_column('users', 'force_password_change', nullable=False)
    op.alter_column('users', 'scholarship_requested', nullable=False)
    op.alter_column('users', 'accepts_tos', nullable=False)
    print(" ✓ Set NOT NULL constraints")

    # ============================================================
    # 2. FIX DONATIONS TABLE
    # ============================================================
    print("\n[2/14] Fixing donations table...")

    donations_columns = {col['name'] for col in inspector.get_columns('donations')}

    # Remove PROD-only columns
    if 'is_anonymous' in donations_columns:
        op.drop_column('donations', 'is_anonymous')
        print(" ✓ Removed donations.is_anonymous (PROD-only)")

    if 'completed_at' in donations_columns:
        op.drop_column('donations', 'completed_at')
        print(" ✓ Removed donations.completed_at (PROD-only)")

    if 'message' in donations_columns:
        op.drop_column('donations', 'message')
        print(" ✓ Removed donations.message (PROD-only)")

    try:
        op.alter_column('donations', 'donor_email', type_=sa.String(), postgresql_using='donor_email::varchar')
        op.alter_column('donations', 'donor_name', type_=sa.String(), postgresql_using='donor_name::varchar')
        op.alter_column('donations', 'stripe_payment_intent_id', type_=sa.String(), postgresql_using='stripe_payment_intent_id::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")
    except Exception as e:
        print(f" ⚠️ Warning: Type conversion failed: {e}")

    # ============================================================
    # 3. FIX SUBSCRIPTIONS TABLE
    # ============================================================
    print("\n[3/14] Fixing subscriptions table...")

    subscriptions_columns = {col['name'] for col in inspector.get_columns('subscriptions')}

    # Remove PROD-only columns
    if 'cancel_at_period_end' in subscriptions_columns:
        op.drop_column('subscriptions', 'cancel_at_period_end')
        print(" ✓ Removed subscriptions.cancel_at_period_end (PROD-only)")

    if 'canceled_at' in subscriptions_columns:
        op.drop_column('subscriptions', 'canceled_at')
        print(" ✓ Removed subscriptions.canceled_at (PROD-only)")

    if 'current_period_start' in subscriptions_columns:
        op.drop_column('subscriptions', 'current_period_start')
        print(" ✓ Removed subscriptions.current_period_start (PROD-only)")

    if 'current_period_end' in subscriptions_columns:
        op.drop_column('subscriptions', 'current_period_end')
        print(" ✓ Removed subscriptions.current_period_end (PROD-only)")

    try:
        op.alter_column('subscriptions', 'stripe_subscription_id', type_=sa.String(), postgresql_using='stripe_subscription_id::varchar')
        op.alter_column('subscriptions', 'stripe_customer_id', type_=sa.String(), postgresql_using='stripe_customer_id::varchar')
        op.alter_column('subscriptions', 'payment_method', type_=sa.String(), postgresql_using='payment_method::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")
    except Exception as e:
        print(f" ⚠️ Warning: Type conversion failed: {e}")

    # Fix nullable constraints
    # NOTE(review): no NULL backfill precedes these — assumes these columns
    # hold no NULLs in PROD; confirm before running.
    op.alter_column('subscriptions', 'start_date', nullable=False)
    op.alter_column('subscriptions', 'manual_payment', nullable=False)
    op.alter_column('subscriptions', 'donation_cents', nullable=False)
    op.alter_column('subscriptions', 'base_subscription_cents', nullable=False)
    print(" ✓ Fixed nullable constraints")

    # ============================================================
    # 4. FIX STORAGE_USAGE TABLE
    # ============================================================
    print("\n[4/14] Fixing storage_usage table...")

    storage_columns = {col['name'] for col in inspector.get_columns('storage_usage')}

    # Remove PROD-only columns
    if 'created_at' in storage_columns:
        op.drop_column('storage_usage', 'created_at')
        print(" ✓ Removed storage_usage.created_at (PROD-only)")

    if 'updated_at' in storage_columns:
        op.drop_column('storage_usage', 'updated_at')
        print(" ✓ Removed storage_usage.updated_at (PROD-only)")

    op.alter_column('storage_usage', 'max_bytes_allowed', nullable=False)
    print(" ✓ Fixed nullable constraint")

    # ============================================================
    # 5. FIX EVENT_GALLERIES TABLE (Add missing DEV columns)
    # ============================================================
    print("\n[5/14] Fixing event_galleries table...")

    event_galleries_columns = {col['name'] for col in inspector.get_columns('event_galleries')}

    # Add DEV-only columns (exist in models.py but not in PROD)
    # server_default keeps the ADD COLUMN valid on tables with existing rows.
    if 'image_key' not in event_galleries_columns:
        op.add_column('event_galleries', sa.Column('image_key', sa.String(), nullable=False, server_default=''))
        print(" ✓ Added event_galleries.image_key")

    if 'file_size_bytes' not in event_galleries_columns:
        op.add_column('event_galleries', sa.Column('file_size_bytes', sa.Integer(), nullable=False, server_default='0'))
        print(" ✓ Added event_galleries.file_size_bytes")

    try:
        op.alter_column('event_galleries', 'image_url', type_=sa.String(), postgresql_using='image_url::varchar')
        print(" ✓ Changed TEXT to VARCHAR")
    except Exception as e:
        print(f" ⚠️ Warning: Type conversion failed: {e}")

    # Note: uploaded_by column already has correct nullable=False in both DEV and PROD

    # ============================================================
    # 6. FIX BYLAWS_DOCUMENTS TABLE
    # ============================================================
    print("\n[6/14] Fixing bylaws_documents table...")

    bylaws_columns = {col['name'] for col in inspector.get_columns('bylaws_documents')}

    # Remove PROD-only column
    if 'updated_at' in bylaws_columns:
        op.drop_column('bylaws_documents', 'updated_at')
        print(" ✓ Removed bylaws_documents.updated_at (PROD-only)")

    try:
        op.alter_column('bylaws_documents', 'title', type_=sa.String(), postgresql_using='title::varchar')
        op.alter_column('bylaws_documents', 'version', type_=sa.String(), postgresql_using='version::varchar')
        op.alter_column('bylaws_documents', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
        op.alter_column('bylaws_documents', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
        print(" ✓ Changed column types")
    except Exception as e:
        print(f" ⚠️ Warning: Type conversion failed: {e}")

    op.alter_column('bylaws_documents', 'document_type', nullable=True)
    print(" ✓ Fixed nullable constraint")

    # ============================================================
    # 7. FIX EVENTS TABLE
    # ============================================================
    print("\n[7/14] Fixing events table...")

    try:
        op.alter_column('events', 'title', type_=sa.String(), postgresql_using='title::varchar')
        op.alter_column('events', 'location', type_=sa.String(), postgresql_using='location::varchar')
        op.alter_column('events', 'calendar_uid', type_=sa.String(), postgresql_using='calendar_uid::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('events', 'location', nullable=False)
    op.alter_column('events', 'created_by', nullable=False)
    print(" ✓ Fixed nullable constraints")

    # ============================================================
    # 8. FIX PERMISSIONS TABLE
    # ============================================================
    print("\n[8/14] Fixing permissions table...")

    try:
        op.alter_column('permissions', 'code', type_=sa.String(), postgresql_using='code::varchar')
        op.alter_column('permissions', 'name', type_=sa.String(), postgresql_using='name::varchar')
        op.alter_column('permissions', 'module', type_=sa.String(), postgresql_using='module::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('permissions', 'module', nullable=False)
    print(" ✓ Fixed nullable constraint")

    # ============================================================
    # 9. FIX ROLES TABLE
    # ============================================================
    print("\n[9/14] Fixing roles table...")

    try:
        op.alter_column('roles', 'code', type_=sa.String(), postgresql_using='code::varchar')
        op.alter_column('roles', 'name', type_=sa.String(), postgresql_using='name::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('roles', 'is_system_role', nullable=False)
    print(" ✓ Fixed nullable constraint")

    # ============================================================
    # 10. FIX USER_INVITATIONS TABLE
    # ============================================================
    print("\n[10/14] Fixing user_invitations table...")

    try:
        op.alter_column('user_invitations', 'email', type_=sa.String(), postgresql_using='email::varchar')
        op.alter_column('user_invitations', 'token', type_=sa.String(), postgresql_using='token::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('user_invitations', 'invited_at', nullable=False)
    print(" ✓ Fixed nullable constraint")

    # ============================================================
    # 11. FIX NEWSLETTER_ARCHIVES TABLE
    # ============================================================
    print("\n[11/14] Fixing newsletter_archives table...")

    try:
        op.alter_column('newsletter_archives', 'title', type_=sa.String(), postgresql_using='title::varchar')
        op.alter_column('newsletter_archives', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
        op.alter_column('newsletter_archives', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
        print(" ✓ Changed column types")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('newsletter_archives', 'document_type', nullable=True)
    print(" ✓ Fixed nullable constraint")

    # ============================================================
    # 12. FIX FINANCIAL_REPORTS TABLE
    # ============================================================
    print("\n[12/14] Fixing financial_reports table...")

    try:
        op.alter_column('financial_reports', 'title', type_=sa.String(), postgresql_using='title::varchar')
        op.alter_column('financial_reports', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar')
        op.alter_column('financial_reports', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar')
        print(" ✓ Changed column types")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('financial_reports', 'document_type', nullable=True)
    print(" ✓ Fixed nullable constraint")

    # ============================================================
    # 13. FIX IMPORT_JOBS TABLE
    # ============================================================
    print("\n[13/14] Fixing import_jobs table...")

    try:
        op.alter_column('import_jobs', 'filename', type_=sa.String(), postgresql_using='filename::varchar')
        op.alter_column('import_jobs', 'file_key', type_=sa.String(), postgresql_using='file_key::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")

        # Change JSONB to JSON
        op.alter_column('import_jobs', 'errors', type_=JSON(), postgresql_using='errors::json')
        print(" ✓ Changed errors JSONB to JSON")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    # Fix nullable constraints
    op.alter_column('import_jobs', 'processed_rows', nullable=False)
    op.alter_column('import_jobs', 'successful_rows', nullable=False)
    op.alter_column('import_jobs', 'failed_rows', nullable=False)
    op.alter_column('import_jobs', 'errors', nullable=False)
    op.alter_column('import_jobs', 'started_at', nullable=False)
    print(" ✓ Fixed nullable constraints")

    # ============================================================
    # 14. FIX SUBSCRIPTION_PLANS TABLE
    # ============================================================
    print("\n[14/14] Fixing subscription_plans table...")

    try:
        op.alter_column('subscription_plans', 'name', type_=sa.String(), postgresql_using='name::varchar')
        op.alter_column('subscription_plans', 'billing_cycle', type_=sa.String(), postgresql_using='billing_cycle::varchar')
        op.alter_column('subscription_plans', 'stripe_price_id', type_=sa.String(), postgresql_using='stripe_price_id::varchar')
        print(" ✓ Changed VARCHAR(n) to VARCHAR")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    op.alter_column('subscription_plans', 'minimum_price_cents', nullable=False)
    print(" ✓ Fixed nullable constraint")

    print("\n✅ Schema alignment complete! PROD now matches DEV (source of truth)")
|
||||
|
||||
|
||||
def downgrade() -> None:
    """No-op: this alignment migration cannot be mechanically reverted.

    The changes in upgrade() are destructive (dropped columns, lossy type
    conversions, sentinel backfills); restoring a database backup is the
    only way back.
    """
    for line in (
        "⚠️ Downgrade not supported for alignment migration",
        " To revert, restore from backup",
    ):
        print(line)
|
||||
==== New file: alembic/versions/012_fix_remaining_differences.py (170 lines) ====
|
||||
"""fix_remaining_differences
|
||||
|
||||
Revision ID: 012_fix_remaining
|
||||
Revises: 011_align_prod_dev
|
||||
Create Date: 2026-01-05
|
||||
|
||||
Fixes the last 5 schema differences found after migration 011:
|
||||
1-2. import_rollback_audit nullable constraints (PROD)
|
||||
3-4. role_permissions type and nullable (PROD)
|
||||
5. UserStatus enum values (DEV - remove deprecated values)
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import ENUM
|
||||
|
||||
# revision identifiers, used by Alembic.
revision: str = '012_fix_remaining'  # this migration's id in the revision chain
down_revision: Union[str, None] = '011_align_prod_dev'  # direct parent revision
branch_labels: Union[str, Sequence[str], None] = None  # no named branch
depends_on: Union[str, Sequence[str], None] = None  # no cross-branch dependency
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Fix remaining schema differences after migration 011.

    Three one-way fixes:
      1. import_rollback_audit: backfill NULL timestamps, then set NOT NULL.
      2. role_permissions.role: narrow VARCHAR(50) -> VARCHAR(10), NOT NULL.
      3. userstatus enum: migrate rows off deprecated values, then recreate
         the enum without them (PostgreSQL cannot drop enum values in place).

    Cleanup vs. previous version: removed the unused `inspector` local and
    its `from sqlalchemy import inspect` — this function never reflects
    table metadata.
    """
    conn = op.get_bind()

    print("Fixing remaining schema differences...")

    # ============================================================
    # 1. FIX IMPORT_ROLLBACK_AUDIT TABLE (PROD only)
    # ============================================================
    print("\n[1/3] Fixing import_rollback_audit nullable constraints...")

    # Check if there are any NULL values first
    try:
        null_count = conn.execute(sa.text("""
            SELECT COUNT(*) FROM import_rollback_audit
            WHERE created_at IS NULL OR rolled_back_at IS NULL
        """)).scalar()

        if null_count > 0:
            # Fill NULLs with current timestamp so NOT NULL can be applied.
            conn.execute(sa.text("""
                UPDATE import_rollback_audit
                SET created_at = NOW() WHERE created_at IS NULL
            """))
            conn.execute(sa.text("""
                UPDATE import_rollback_audit
                SET rolled_back_at = NOW() WHERE rolled_back_at IS NULL
            """))
            print(f" ✓ Filled {null_count} NULL timestamps")

        # Now set NOT NULL
        op.alter_column('import_rollback_audit', 'created_at', nullable=False)
        op.alter_column('import_rollback_audit', 'rolled_back_at', nullable=False)
        print(" ✓ Set NOT NULL constraints")
    except Exception as e:
        # Best-effort: the table may not exist in this environment.
        print(f" ⚠️ Warning: {e}")

    # ============================================================
    # 2. FIX ROLE_PERMISSIONS TABLE (PROD only)
    # ============================================================
    print("\n[2/3] Fixing role_permissions.role type and nullable...")

    try:
        # Change VARCHAR(50) to VARCHAR(10) to match UserRole enum
        op.alter_column('role_permissions', 'role',
                        type_=sa.String(10),
                        postgresql_using='role::varchar(10)')
        print(" ✓ Changed VARCHAR(50) to VARCHAR(10)")

        # Set NOT NULL
        op.alter_column('role_permissions', 'role', nullable=False)
        print(" ✓ Set NOT NULL constraint")
    except Exception as e:
        print(f" ⚠️ Warning: {e}")

    # ============================================================
    # 3. FIX USERSTATUS ENUM (DEV only - remove deprecated values)
    # ============================================================
    print("\n[3/3] Fixing UserStatus enum values...")

    try:
        # First, check if the enum has deprecated values
        enum_values = conn.execute(sa.text("""
            SELECT enumlabel
            FROM pg_enum
            WHERE enumtypid = (
                SELECT oid FROM pg_type WHERE typname = 'userstatus'
            )
        """)).fetchall()

        enum_values_list = [row[0] for row in enum_values]
        has_deprecated = 'pending_approval' in enum_values_list or 'pre_approved' in enum_values_list

        if not has_deprecated:
            print(" ✓ UserStatus enum already correct (no deprecated values)")
        else:
            print(" ⏳ Found deprecated enum values, migrating...")

            # Check if any users have deprecated status values
            deprecated_count = conn.execute(sa.text("""
                SELECT COUNT(*) FROM users
                WHERE status IN ('pending_approval', 'pre_approved')
            """)).scalar()

            if deprecated_count > 0:
                print(f" ⏳ Migrating {deprecated_count} users with deprecated status values...")

                # Migrate deprecated values to new equivalents
                conn.execute(sa.text("""
                    UPDATE users
                    SET status = 'pre_validated'
                    WHERE status = 'pre_approved'
                """))

                conn.execute(sa.text("""
                    UPDATE users
                    SET status = 'payment_pending'
                    WHERE status = 'pending_approval'
                """))

                print(" ✓ Migrated deprecated status values")
            else:
                print(" ✓ No users with deprecated status values")

            # Now remove deprecated enum values.
            # PostgreSQL doesn't support removing enum values directly,
            # so we need to recreate the enum and swap the column over.
            # NOTE(review): DROP TYPE will fail if users.status has a
            # typed DEFAULT or other objects depend on userstatus —
            # confirm none exist before running in DEV.
            conn.execute(sa.text("""
                -- Create new enum with correct values (matches models.py)
                CREATE TYPE userstatus_new AS ENUM (
                    'pending_email',
                    'pending_validation',
                    'pre_validated',
                    'payment_pending',
                    'active',
                    'inactive',
                    'canceled',
                    'expired',
                    'rejected',
                    'abandoned'
                );

                -- Update column to use new enum
                ALTER TABLE users
                ALTER COLUMN status TYPE userstatus_new
                USING status::text::userstatus_new;

                -- Drop old enum and rename new one
                DROP TYPE userstatus;
                ALTER TYPE userstatus_new RENAME TO userstatus;
            """))

            print(" ✓ Updated UserStatus enum (removed deprecated values)")

    except Exception as e:
        print(f" ⚠️ Warning: Enum update failed (may already be correct): {e}")

    print("\n✅ All remaining differences fixed!")
|
||||
|
||||
|
||||
def downgrade() -> None:
    """No-op downgrade: the fixes applied in upgrade() are one-way.

    Restore from a backup if the changes must be undone.
    """
    message = "⚠️ Downgrade not supported"
    print(message)
|
||||
==== New file: alembic/versions/013_sync_role_permissions.py (147 lines) ====
|
||||
"""sync_role_permissions
|
||||
|
||||
Revision ID: 013_sync_permissions
|
||||
Revises: 012_fix_remaining
|
||||
Create Date: 2026-01-05
|
||||
|
||||
Syncs role_permissions between DEV and PROD bidirectionally.
|
||||
- Adds 18 DEV-only permissions to PROD (new features)
|
||||
- Adds 6 PROD-only permissions to DEV (operational/security)
|
||||
Result: Both environments have identical 142 permission mappings
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
# revision identifiers, used by Alembic.
revision: str = '013_sync_permissions'  # this migration's id in the revision chain
down_revision: Union[str, None] = '012_fix_remaining'  # direct parent revision
branch_labels: Union[str, Sequence[str], None] = None  # no named branch
depends_on: Union[str, Sequence[str], None] = None  # no cross-branch dependency
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Sync role_permissions bidirectionally.

    Step 1 inserts any missing rows in `permissions` (union of DEV and
    PROD); step 2 inserts any missing role->permission mappings. Both
    steps are idempotent (guarded by NOT EXISTS), so re-running is safe.

    Fix vs. previous version: values are now passed as bound parameters
    instead of being interpolated into the SQL with f-strings — this
    removes the SQL-injection-shaped pattern and any quoting hazard, and
    the two statements are compiled once outside their loops.
    """
    from sqlalchemy import text

    conn = op.get_bind()

    print("Syncing role_permissions between environments...")

    # ============================================================
    # STEP 1: Add missing permissions to ensure all exist
    # ============================================================
    print("\n[1/2] Ensuring all permissions exist...")

    # Permissions that should exist (union of both environments):
    # (code, display name, module). The display name doubles as the
    # description, matching the previous behavior.
    all_permissions = [
        # From DEV-only list
        ('donations.export', 'Export Donations', 'donations'),
        ('donations.view', 'View Donations', 'donations'),
        ('financials.create', 'Create Financial Reports', 'financials'),
        ('financials.delete', 'Delete Financial Reports', 'financials'),
        ('financials.edit', 'Edit Financial Reports', 'financials'),
        ('financials.export', 'Export Financial Reports', 'financials'),
        ('financials.payments', 'Manage Financial Payments', 'financials'),
        ('settings.edit', 'Edit Settings', 'settings'),
        ('settings.email_templates', 'Manage Email Templates', 'settings'),
        ('subscriptions.activate', 'Activate Subscriptions', 'subscriptions'),
        ('subscriptions.cancel', 'Cancel Subscriptions', 'subscriptions'),
        ('subscriptions.create', 'Create Subscriptions', 'subscriptions'),
        ('subscriptions.edit', 'Edit Subscriptions', 'subscriptions'),
        ('subscriptions.export', 'Export Subscriptions', 'subscriptions'),
        ('subscriptions.plans', 'Manage Subscription Plans', 'subscriptions'),
        ('subscriptions.view', 'View Subscriptions', 'subscriptions'),
        ('events.calendar_export', 'Export Event Calendar', 'events'),
        ('events.rsvps', 'View Event RSVPs', 'events'),
        # From PROD-only list
        ('permissions.audit', 'Audit Permissions', 'permissions'),
        ('permissions.view', 'View Permissions', 'permissions'),
        ('settings.backup', 'Manage Backups', 'settings'),
    ]

    # Compiled once; executed per row with bound parameters.
    insert_permission = text("""
        INSERT INTO permissions (id, code, name, description, module, created_at)
        SELECT
            gen_random_uuid(),
            :code,
            :name,
            :name,
            :module,
            NOW()
        WHERE NOT EXISTS (
            SELECT 1 FROM permissions WHERE code = :code
        )
    """)
    for code, name, module in all_permissions:
        # Insert if not exists
        conn.execute(insert_permission, {"code": code, "name": name, "module": module})

    print(" ✓ Ensured all permissions exist")

    # ============================================================
    # STEP 2: Add missing role-permission mappings
    # ============================================================
    print("\n[2/2] Adding missing role-permission mappings...")

    # Mappings that should exist (union of both environments)
    role_permission_mappings = [
        # DEV-only (add to PROD)
        ('admin', 'donations.export'),
        ('admin', 'donations.view'),
        ('admin', 'financials.create'),
        ('admin', 'financials.delete'),
        ('admin', 'financials.edit'),
        ('admin', 'financials.export'),
        ('admin', 'financials.payments'),
        ('admin', 'settings.edit'),
        ('admin', 'settings.email_templates'),
        ('admin', 'subscriptions.activate'),
        ('admin', 'subscriptions.cancel'),
        ('admin', 'subscriptions.create'),
        ('admin', 'subscriptions.edit'),
        ('admin', 'subscriptions.export'),
        ('admin', 'subscriptions.plans'),
        ('admin', 'subscriptions.view'),
        ('member', 'events.calendar_export'),
        ('member', 'events.rsvps'),
        # PROD-only (add to DEV)
        ('admin', 'permissions.audit'),
        ('admin', 'permissions.view'),
        ('admin', 'settings.backup'),
        ('finance', 'bylaws.view'),
        ('finance', 'events.view'),
        ('finance', 'newsletters.view'),
    ]

    insert_mapping = text("""
        INSERT INTO role_permissions (id, role, permission_id, created_at)
        SELECT
            gen_random_uuid(),
            :role,
            p.id,
            NOW()
        FROM permissions p
        WHERE p.code = :perm_code
        AND NOT EXISTS (
            SELECT 1 FROM role_permissions rp
            WHERE rp.role = :role
            AND rp.permission_id = p.id
        )
        RETURNING id
    """)

    added_count = 0
    for role, perm_code in role_permission_mappings:
        result = conn.execute(insert_mapping, {"role": role, "perm_code": perm_code})
        # rowcount > 0 means the mapping was actually inserted (not skipped).
        if result.rowcount > 0:
            added_count += 1

    print(f" ✓ Added {added_count} missing role-permission mappings")

    # Verify final count
    final_count = conn.execute(text("SELECT COUNT(*) FROM role_permissions")).scalar()
    print(f"\n✅ Role-permission mappings synchronized: {final_count} total")
|
||||
|
||||
|
||||
def downgrade() -> None:
    """No-op: the permission sync is additive and is never reverted."""
    print("⚠️  Downgrade not supported - permissions are additive")
|
||||
39
alembic/versions/014_add_custom_registration_data.py
Normal file
39
alembic/versions/014_add_custom_registration_data.py
Normal file
@@ -0,0 +1,39 @@
|
||||
"""add_custom_registration_data
|
||||
|
||||
Revision ID: 014_custom_registration
|
||||
Revises: a1b2c3d4e5f6
|
||||
Create Date: 2026-02-01 10:00:00.000000
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '014_custom_registration'
|
||||
down_revision: Union[str, None] = 'a1b2c3d4e5f6'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add users.custom_registration_data for dynamic registration forms."""
    # JSON column holding per-user answers to admin-defined registration
    # fields.  NOT NULL with a '{}' server default so existing rows are
    # backfilled with an empty object.
    op.add_column('users', sa.Column(
        'custom_registration_data',
        sa.JSON,
        nullable=False,
        server_default='{}'
    ))

    # Attach a COMMENT so the column's purpose is visible from psql /
    # schema-introspection tools.
    op.execute("""
        COMMENT ON COLUMN users.custom_registration_data IS
        'Dynamic registration field responses stored as JSON for custom form fields';
    """)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop users.custom_registration_data (stored responses are lost)."""
    op.drop_column('users', 'custom_registration_data')
|
||||
48
alembic/versions/4fa11836f7fd_add_role_audit_fields.py
Normal file
48
alembic/versions/4fa11836f7fd_add_role_audit_fields.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""add_role_audit_fields
|
||||
|
||||
Revision ID: 4fa11836f7fd
|
||||
Revises: 013_sync_permissions
|
||||
Create Date: 2026-01-16 17:21:40.514605
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '4fa11836f7fd'
|
||||
down_revision: Union[str, None] = '013_sync_permissions'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add role-change audit columns to users.

    Records when a user's role last changed and which user made the
    change, plus an index for querying by change date.
    """
    op.add_column('users', sa.Column('role_changed_at', sa.DateTime(timezone=True), nullable=True))
    # Self-referencing FK: the acting admin is another row in users.
    op.add_column('users', sa.Column('role_changed_by', UUID(as_uuid=True), nullable=True))

    # SET NULL keeps the audit row meaningful if the acting admin is
    # later deleted.
    op.create_foreign_key(
        'fk_users_role_changed_by',
        'users', 'users',
        ['role_changed_by'], ['id'],
        ondelete='SET NULL'
    )

    # Index for efficient "roles changed since X" queries.
    op.create_index('idx_users_role_changed_at', 'users', ['role_changed_at'])
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Remove the role audit index, FK constraint, and columns."""
    # Pass table_name explicitly: some backends require it for drop_index,
    # and the other migrations in this repo consistently provide it.
    op.drop_index('idx_users_role_changed_at', table_name='users')

    # Drop the FK before the column it constrains.
    op.drop_constraint('fk_users_role_changed_by', 'users', type_='foreignkey')

    # Drop columns in reverse order of creation.
    op.drop_column('users', 'role_changed_by')
    op.drop_column('users', 'role_changed_at')
|
||||
@@ -0,0 +1,76 @@
|
||||
"""add_stripe_transaction_metadata
|
||||
|
||||
Revision ID: 956ea1628264
|
||||
Revises: ec4cb4a49cde
|
||||
Create Date: 2026-01-20 22:00:01.806931
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '956ea1628264'
|
||||
down_revision: Union[str, None] = 'ec4cb4a49cde'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add Stripe transaction metadata to subscriptions and donations."""
    # --- subscriptions: Stripe object references + card display data ---
    op.add_column('subscriptions', sa.Column('stripe_payment_intent_id', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('stripe_charge_id', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('stripe_invoice_id', sa.String(), nullable=True))
    op.add_column('subscriptions', sa.Column('payment_completed_at', sa.DateTime(timezone=True), nullable=True))
    # card_last4/card_brand are display-only metadata; no full PAN is stored.
    op.add_column('subscriptions', sa.Column('card_last4', sa.String(4), nullable=True))
    op.add_column('subscriptions', sa.Column('card_brand', sa.String(20), nullable=True))
    op.add_column('subscriptions', sa.Column('stripe_receipt_url', sa.String(), nullable=True))

    # Indexes for locating subscriptions by Stripe object IDs (webhooks).
    op.create_index('idx_subscriptions_payment_intent', 'subscriptions', ['stripe_payment_intent_id'])
    op.create_index('idx_subscriptions_charge_id', 'subscriptions', ['stripe_charge_id'])
    op.create_index('idx_subscriptions_invoice_id', 'subscriptions', ['stripe_invoice_id'])

    # --- donations: same style of metadata ---
    op.add_column('donations', sa.Column('stripe_charge_id', sa.String(), nullable=True))
    op.add_column('donations', sa.Column('stripe_customer_id', sa.String(), nullable=True))
    op.add_column('donations', sa.Column('payment_completed_at', sa.DateTime(timezone=True), nullable=True))
    op.add_column('donations', sa.Column('card_last4', sa.String(4), nullable=True))
    op.add_column('donations', sa.Column('card_brand', sa.String(20), nullable=True))
    op.add_column('donations', sa.Column('stripe_receipt_url', sa.String(), nullable=True))

    # NOTE(review): 'idx_donations_payment_intent' indexes
    # donations.stripe_payment_intent_id, a column this migration does NOT
    # add — presumably it exists from an earlier revision; confirm before
    # running against a fresh schema or this create_index will fail.
    op.create_index('idx_donations_payment_intent', 'donations', ['stripe_payment_intent_id'])
    op.create_index('idx_donations_charge_id', 'donations', ['stripe_charge_id'])
    op.create_index('idx_donations_customer_id', 'donations', ['stripe_customer_id'])
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Remove the Stripe metadata columns and indexes added by upgrade().

    Mirrors upgrade() in reverse order.  donations.stripe_payment_intent_id
    is intentionally not dropped here — upgrade() never added it (only the
    index on it).
    """
    # Indexes first, then the columns they cover.
    op.drop_index('idx_donations_customer_id', table_name='donations')
    op.drop_index('idx_donations_charge_id', table_name='donations')
    op.drop_index('idx_donations_payment_intent', table_name='donations')

    op.drop_column('donations', 'stripe_receipt_url')
    op.drop_column('donations', 'card_brand')
    op.drop_column('donations', 'card_last4')
    op.drop_column('donations', 'payment_completed_at')
    op.drop_column('donations', 'stripe_customer_id')
    op.drop_column('donations', 'stripe_charge_id')

    op.drop_index('idx_subscriptions_invoice_id', table_name='subscriptions')
    op.drop_index('idx_subscriptions_charge_id', table_name='subscriptions')
    op.drop_index('idx_subscriptions_payment_intent', table_name='subscriptions')

    op.drop_column('subscriptions', 'stripe_receipt_url')
    op.drop_column('subscriptions', 'card_brand')
    op.drop_column('subscriptions', 'card_last4')
    op.drop_column('subscriptions', 'payment_completed_at')
    op.drop_column('subscriptions', 'stripe_invoice_id')
    op.drop_column('subscriptions', 'stripe_charge_id')
    op.drop_column('subscriptions', 'stripe_payment_intent_id')
|
||||
100
alembic/versions/add_payment_methods.py
Normal file
100
alembic/versions/add_payment_methods.py
Normal file
@@ -0,0 +1,100 @@
|
||||
"""add_payment_methods
|
||||
|
||||
Revision ID: a1b2c3d4e5f6
|
||||
Revises: 956ea1628264
|
||||
Create Date: 2026-01-30 10:00:00.000000
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'a1b2c3d4e5f6'
|
||||
down_revision: Union[str, None] = '956ea1628264'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the payment_methods table and users.stripe_customer_id.

    All DDL is conditional (information_schema checks / checkfirst) so the
    migration is safe to run against databases where some objects already
    exist.
    """
    conn = op.get_bind()

    # Create the enum idempotently: create_type=False stops SQLAlchemy from
    # emitting CREATE TYPE implicitly; the explicit create(checkfirst=True)
    # below only creates it when missing.
    paymentmethodtype = postgresql.ENUM(
        'card', 'cash', 'bank_transfer', 'check',
        name='paymentmethodtype',
        create_type=False
    )
    paymentmethodtype.create(conn, checkfirst=True)

    # Add users.stripe_customer_id only if it does not already exist.
    result = conn.execute(sa.text("""
        SELECT column_name FROM information_schema.columns
        WHERE table_name = 'users' AND column_name = 'stripe_customer_id'
    """))
    if result.fetchone() is None:
        op.add_column('users', sa.Column(
            'stripe_customer_id',
            sa.String(),
            nullable=True,
            comment='Stripe Customer ID for payment method management'
        ))
        op.create_index('ix_users_stripe_customer_id', 'users', ['stripe_customer_id'])

    # Create payment_methods only if the table does not already exist.
    result = conn.execute(sa.text("""
        SELECT table_name FROM information_schema.tables
        WHERE table_name = 'payment_methods'
    """))
    if result.fetchone() is None:
        # NOTE(review): onupdate=sa.func.now() is a client-side SQLAlchemy
        # ORM hook and produces no DB-level trigger in this DDL; updated_at
        # will only auto-update for writes that go through the ORM model.
        op.create_table(
            'payment_methods',
            sa.Column('id', postgresql.UUID(as_uuid=True), primary_key=True),
            sa.Column('user_id', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='CASCADE'), nullable=False),
            sa.Column('stripe_payment_method_id', sa.String(), nullable=True, unique=True, comment='Stripe pm_xxx reference'),
            sa.Column('card_brand', sa.String(20), nullable=True, comment='Card brand: visa, mastercard, amex, etc.'),
            sa.Column('card_last4', sa.String(4), nullable=True, comment='Last 4 digits of card'),
            sa.Column('card_exp_month', sa.Integer(), nullable=True, comment='Card expiration month'),
            sa.Column('card_exp_year', sa.Integer(), nullable=True, comment='Card expiration year'),
            sa.Column('card_funding', sa.String(20), nullable=True, comment='Card funding type: credit, debit, prepaid'),
            sa.Column('payment_type', paymentmethodtype, nullable=False, server_default='card'),
            sa.Column('is_default', sa.Boolean(), nullable=False, server_default='false', comment='Whether this is the default payment method for auto-renewals'),
            sa.Column('is_active', sa.Boolean(), nullable=False, server_default='false' if False else 'true', comment='Soft delete flag - False means removed'),
            sa.Column('is_manual', sa.Boolean(), nullable=False, server_default='false', comment='True for manually recorded methods (cash/check)'),
            sa.Column('manual_notes', sa.Text(), nullable=True, comment='Admin notes for manual payment methods'),
            sa.Column('created_by', postgresql.UUID(as_uuid=True), sa.ForeignKey('users.id', ondelete='SET NULL'), nullable=True, comment='Admin who added this on behalf of user'),
            sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
            sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now(), onupdate=sa.func.now()),
        )

        # Lookup indexes: by owner, by Stripe pm id, plus composite indexes
        # for the default-method and active-method queries.
        op.create_index('ix_payment_methods_user_id', 'payment_methods', ['user_id'])
        op.create_index('ix_payment_methods_stripe_pm_id', 'payment_methods', ['stripe_payment_method_id'])
        op.create_index('idx_payment_method_user_default', 'payment_methods', ['user_id', 'is_default'])
        op.create_index('idx_payment_method_active', 'payment_methods', ['user_id', 'is_active'])
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop payment_methods, users.stripe_customer_id, and the enum.

    NOTE(review): upgrade() creates these objects conditionally, but this
    downgrade drops them unconditionally — it can fail (or remove a
    pre-existing column) on databases where upgrade() skipped a step.
    """
    # Drop indexes before the table that owns them.
    op.drop_index('idx_payment_method_active', table_name='payment_methods')
    op.drop_index('idx_payment_method_user_default', table_name='payment_methods')
    op.drop_index('ix_payment_methods_stripe_pm_id', table_name='payment_methods')
    op.drop_index('ix_payment_methods_user_id', table_name='payment_methods')

    op.drop_table('payment_methods')

    op.drop_index('ix_users_stripe_customer_id', table_name='users')
    op.drop_column('users', 'stripe_customer_id')

    # Drop the enum last, after every column that used it is gone.
    paymentmethodtype = postgresql.ENUM(
        'card', 'cash', 'bank_transfer', 'check',
        name='paymentmethodtype'
    )
    paymentmethodtype.drop(op.get_bind(), checkfirst=True)
|
||||
68
alembic/versions/ec4cb4a49cde_add_system_settings_table.py
Normal file
68
alembic/versions/ec4cb4a49cde_add_system_settings_table.py
Normal file
@@ -0,0 +1,68 @@
|
||||
"""add_system_settings_table
|
||||
|
||||
Revision ID: ec4cb4a49cde
|
||||
Revises: 4fa11836f7fd
|
||||
Create Date: 2026-01-16 18:16:00.283455
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'ec4cb4a49cde'
|
||||
down_revision: Union[str, None] = '4fa11836f7fd'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the system_settings key/value table and its settingtype enum."""
    # Create the enum idempotently: the DO block swallows duplicate_object
    # so re-running against a DB where the type exists is harmless.
    op.execute("""
        DO $$ BEGIN
            CREATE TYPE settingtype AS ENUM ('plaintext', 'encrypted', 'json');
        EXCEPTION
            WHEN duplicate_object THEN null;
        END $$;
    """)

    # Raw SQL (rather than op.create_table) so the column COMMENTs can be
    # attached in the same statement batch.
    op.execute("""
        CREATE TABLE system_settings (
            id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
            setting_key VARCHAR(100) UNIQUE NOT NULL,
            setting_value TEXT,
            setting_type settingtype NOT NULL DEFAULT 'plaintext'::settingtype,
            description TEXT,
            updated_by UUID REFERENCES users(id) ON DELETE SET NULL,
            created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
            is_sensitive BOOLEAN NOT NULL DEFAULT FALSE
        );

        COMMENT ON COLUMN system_settings.setting_key IS 'Unique setting identifier (e.g., stripe_secret_key)';
        COMMENT ON COLUMN system_settings.setting_value IS 'Setting value (encrypted if setting_type is encrypted)';
        COMMENT ON COLUMN system_settings.setting_type IS 'Type of setting: plaintext, encrypted, or json';
        COMMENT ON COLUMN system_settings.description IS 'Human-readable description of the setting';
        COMMENT ON COLUMN system_settings.updated_by IS 'User who last updated this setting';
        COMMENT ON COLUMN system_settings.is_sensitive IS 'Whether this setting contains sensitive data';
    """)

    # NOTE(review): idx_system_settings_key likely duplicates the implicit
    # index created by the UNIQUE constraint on setting_key.
    op.create_index('idx_system_settings_key', 'system_settings', ['setting_key'])
    op.create_index('idx_system_settings_updated_at', 'system_settings', ['updated_at'])
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop system_settings, its indexes, and the settingtype enum."""
    # Pass table_name explicitly: some backends require it for drop_index,
    # and the other migrations in this repo consistently provide it.
    op.drop_index('idx_system_settings_updated_at', table_name='system_settings')
    op.drop_index('idx_system_settings_key', table_name='system_settings')

    op.drop_table('system_settings')

    # Drop the enum last, after the table that used it is gone.
    op.execute('DROP TYPE IF EXISTS settingtype')
|
||||
133
auth.py
133
auth.py
@@ -1,5 +1,5 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Optional
|
||||
from typing import Optional, List
|
||||
from jose import JWTError, jwt
|
||||
from passlib.context import CryptContext
|
||||
from fastapi import Depends, HTTPException, status
|
||||
@@ -8,7 +8,7 @@ from sqlalchemy.orm import Session
|
||||
import os
|
||||
import secrets
|
||||
from database import get_db
|
||||
from models import User, UserRole
|
||||
from models import User, UserRole, Permission, RolePermission, Role
|
||||
|
||||
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||
security = HTTPBearer()
|
||||
@@ -50,6 +50,24 @@ def verify_reset_token(token, db):
|
||||
|
||||
return user
|
||||
|
||||
def get_user_role_code(user: User) -> str:
    """
    Resolve the user's effective role code.

    Prefers the dynamic role relationship (Phase 3+) when both role_id
    and the loaded role_obj are present; otherwise falls back to the
    legacy UserRole enum value (Phase 1-2), preserving backward
    compatibility during the migration.

    Args:
        user: User object

    Returns:
        Role code string (e.g., "superadmin", "admin", "member", "guest")
    """
    uses_dynamic_role = user.role_id is not None and user.role_obj is not None
    return user.role_obj.code if uses_dynamic_role else user.role.value
|
||||
|
||||
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
|
||||
to_encode = data.copy()
|
||||
if expires_delta:
|
||||
@@ -100,7 +118,9 @@ async def get_current_user(
|
||||
return user
|
||||
|
||||
async def get_current_admin_user(current_user: User = Depends(get_current_user)) -> User:
|
||||
if current_user.role != UserRole.admin:
|
||||
"""Require user to be admin or superadmin"""
|
||||
role_code = get_user_role_code(current_user)
|
||||
if role_code not in ["admin", "superadmin"]:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="Not enough permissions"
|
||||
@@ -108,7 +128,7 @@ async def get_current_admin_user(current_user: User = Depends(get_current_user))
|
||||
return current_user
|
||||
|
||||
async def get_active_member(current_user: User = Depends(get_current_user)) -> User:
|
||||
"""Require user to be active member with valid payment"""
|
||||
"""Require user to be active member or staff with valid status"""
|
||||
from models import UserStatus
|
||||
|
||||
if current_user.status != UserStatus.active:
|
||||
@@ -117,10 +137,113 @@ async def get_active_member(current_user: User = Depends(get_current_user)) -> U
|
||||
detail="Active membership required. Please complete payment."
|
||||
)
|
||||
|
||||
if current_user.role not in [UserRole.member, UserRole.admin]:
|
||||
role_code = get_user_role_code(current_user)
|
||||
if role_code not in ["member", "admin", "superadmin", "finance"]:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="Member access only"
|
||||
)
|
||||
|
||||
return current_user
|
||||
|
||||
|
||||
# ============================================================
|
||||
# RBAC Permission System
|
||||
# ============================================================
|
||||
|
||||
async def get_user_permissions(user: User, db: Session) -> List[str]:
    """
    Get all permission codes for the user's role.

    Superadmin automatically receives every permission in the permissions
    table.  Results are memoized on the user object (_permission_cache) so
    repeated checks within one request avoid extra DB queries.
    Supports both dynamic roles (role_id) and the legacy enum (role).

    NOTE(review): the cache lives as long as the User instance — this is
    "request-level" only if the ORM session is per-request; confirm that
    user objects are not shared across requests.

    Args:
        user: Current authenticated user
        db: Database session

    Returns:
        List of permission code strings (e.g., ["users.view", "events.create"])
    """
    # Memoized on the instance: return the cached list if present.
    if hasattr(user, '_permission_cache'):
        return user._permission_cache

    # Resolve role code (dynamic role preferred, legacy enum fallback).
    role_code = get_user_role_code(user)

    if role_code == "superadmin":
        # Superadmin bypasses role_permissions: grant every known code.
        all_perms = db.query(Permission.code).all()
        permissions = [p[0] for p in all_perms]
    else:
        # Join role_permissions → permissions for this role.
        if user.role_id is not None:
            # Dynamic role: match on role_id.
            permissions = db.query(Permission.code)\
                .join(RolePermission)\
                .filter(RolePermission.role_id == user.role_id)\
                .all()
        else:
            # Legacy enum: match on the role column.
            permissions = db.query(Permission.code)\
                .join(RolePermission)\
                .filter(RolePermission.role == user.role)\
                .all()
        # Each row is a 1-tuple; flatten to plain strings.
        permissions = [p[0] for p in permissions]

    # Cache on the user object for the remainder of this request.
    user._permission_cache = permissions
    return permissions
|
||||
|
||||
|
||||
def require_permission(permission_code: str):
    """
    Build a FastAPI dependency that enforces a single permission.

    Usage:
        @app.get("/admin/users", dependencies=[Depends(require_permission("users.view"))])
        async def get_users():
            ...

    Args:
        permission_code: Permission code to check (e.g., "users.create")

    Returns:
        Async dependency that yields the current user when the permission
        is granted.

    Raises:
        HTTPException 403 if user lacks the required permission
    """
    async def _checker(
        current_user: User = Depends(get_current_user),
        db: Session = Depends(get_db)
    ) -> User:
        # Look up (possibly cached) permission codes for this user's role.
        granted = await get_user_permissions(current_user, db)

        if permission_code in granted:
            return current_user

        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=f"Permission required: {permission_code}"
        )

    return _checker
|
||||
|
||||
|
||||
async def get_current_superadmin(current_user: User = Depends(get_current_user)) -> User:
    """
    Dependency that only admits superadmins.

    Any other role (including admin) receives HTTP 403.
    """
    if get_user_role_code(current_user) != "superadmin":
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Superadmin access required"
        )
    return current_user
|
||||
|
||||
92
check_all_columns.sql
Normal file
92
check_all_columns.sql
Normal file
@@ -0,0 +1,92 @@
|
||||
-- Comprehensive check for all missing columns
-- Run: psql -h 10.9.23.11 -p 54321 -U postgres -d loaf_new -f check_all_columns.sql
-- Read-only diagnostic: queries information_schema only, so it makes no
-- changes to the target database.  Wide one-row results use \gx
-- (expanded display) instead of ; so each check prints on its own line.

\echo '================================================================'
\echo 'COMPREHENSIVE COLUMN CHECK FOR ALL TABLES'
\echo '================================================================'

-- ============================================================
-- 1. USERS TABLE
-- ============================================================
\echo ''
\echo '1. USERS TABLE - Expected: 60+ columns'
\echo 'Checking for specific columns:'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'newsletter_publish_name') THEN '✓' ELSE '✗' END || ' newsletter_publish_name',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'volunteer_interests') THEN '✓' ELSE '✗' END || ' volunteer_interests',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'scholarship_requested') THEN '✓' ELSE '✗' END || ' scholarship_requested',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'show_in_directory') THEN '✓' ELSE '✗' END || ' show_in_directory',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'password_reset_token') THEN '✓' ELSE '✗' END || ' password_reset_token',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'accepts_tos') THEN '✓' ELSE '✗' END || ' accepts_tos',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'member_since') THEN '✓' ELSE '✗' END || ' member_since',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'rejection_reason') THEN '✓' ELSE '✗' END || ' rejection_reason',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'import_source') THEN '✓' ELSE '✗' END || ' import_source'
\gx

-- ============================================================
-- 2. EVENTS TABLE
-- ============================================================
\echo ''
\echo '2. EVENTS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'events' AND column_name = 'calendar_uid') THEN '✓' ELSE '✗' END || ' calendar_uid';

-- ============================================================
-- 3. SUBSCRIPTIONS TABLE
-- ============================================================
\echo ''
\echo '3. SUBSCRIPTIONS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'subscriptions' AND column_name = 'base_subscription_cents') THEN '✓' ELSE '✗' END || ' base_subscription_cents',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'subscriptions' AND column_name = 'donation_cents') THEN '✓' ELSE '✗' END || ' donation_cents',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'subscriptions' AND column_name = 'manual_payment') THEN '✓' ELSE '✗' END || ' manual_payment'
\gx

-- ============================================================
-- 4. IMPORT_JOBS TABLE
-- ============================================================
\echo ''
\echo '4. IMPORT_JOBS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'field_mapping') THEN '✓' ELSE '✗' END || ' field_mapping',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'wordpress_metadata') THEN '✓' ELSE '✗' END || ' wordpress_metadata',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'imported_user_ids') THEN '✓' ELSE '✗' END || ' imported_user_ids',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'rollback_at') THEN '✓' ELSE '✗' END || ' rollback_at',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'import_jobs' AND column_name = 'rollback_by') THEN '✓' ELSE '✗' END || ' rollback_by'
\gx

-- ============================================================
-- 5. CHECK IF IMPORT_ROLLBACK_AUDIT TABLE EXISTS
-- ============================================================
\echo ''
\echo '5. IMPORT_ROLLBACK_AUDIT TABLE - Should exist'
SELECT CASE
    WHEN EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'import_rollback_audit')
    THEN '✓ Table exists'
    ELSE '✗ TABLE MISSING - Need to create it'
END AS status;

-- ============================================================
-- SUMMARY: Count existing columns in each table
-- ============================================================
\echo ''
\echo '================================================================'
\echo 'SUMMARY: Column counts per table'
\echo '================================================================'

SELECT
    table_name,
    COUNT(*) as column_count
FROM information_schema.columns
WHERE table_name IN (
    'users', 'events', 'event_rsvps', 'subscription_plans', 'subscriptions',
    'donations', 'event_galleries', 'newsletter_archives', 'financial_reports',
    'bylaws_documents', 'storage_usage', 'permissions', 'roles', 'role_permissions',
    'user_invitations', 'import_jobs', 'import_rollback_audit'
)
GROUP BY table_name
ORDER BY table_name;
|
||||
345
check_db_integrity.py
Normal file
345
check_db_integrity.py
Normal file
@@ -0,0 +1,345 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Database Integrity Checker
|
||||
Compares schema and data integrity between development and production databases
|
||||
"""
|
||||
|
||||
import json
import os
import sys
from collections import defaultdict

from sqlalchemy import create_engine, inspect, text
from sqlalchemy.engine import reflection
|
||||
|
||||
# Database connection URLs.
# SECURITY(review): these credentials were committed in plain text and should
# be rotated.  Prefer supplying them via DEV_DATABASE_URL / PROD_DATABASE_URL
# environment variables; the hardcoded values remain only as a fallback so
# existing invocations keep working.
DEV_DB = os.environ.get(
    "DEV_DATABASE_URL",
    "postgresql://postgres:RchhcpaUKZuZuMOvB5kwCP1weLBnAG6tNMXE5FHdk8AwCvolBMALYFVYRM7WCl9x@10.9.23.11:5001/membership_demo",
)
PROD_DB = os.environ.get(
    "PROD_DATABASE_URL",
    "postgresql://postgres:fDv3fRvMgfPueDWDUxj27NJVaynsewIdh6b2Hb28tcvG3Ew6mhscASg2kulx4tr7@10.9.23.11:54321/loaf_new",
)
|
||||
|
||||
def get_db_info(engine, label):
    """Collect schema metadata for one database.

    Gathers tables (columns + primary key), indexes, foreign keys,
    sequences, and enum types into a nested dict consumed by the
    compare_* helpers.  Sequence/enum queries are PostgreSQL-specific.

    Args:
        engine: SQLAlchemy engine connected to the target database.
        label: Human-readable name used in reports (e.g. "DEV").

    Returns:
        Dict with keys 'label', 'tables', 'indexes', 'foreign_keys',
        'sequences', 'enums'.
    """
    inspector = inspect(engine)

    info = {
        'label': label,
        'tables': {},
        'indexes': {},
        'foreign_keys': {},
        'sequences': [],
        'enums': []  # replaced by a {name: values} dict once enums are read
    }

    # Get all table names
    table_names = inspector.get_table_names()

    for table_name in table_names:
        # Columns: everything is stringified so later comparisons are
        # simple string equality.
        # NOTE(review): str(col.get('default', None)) turns a missing
        # default into the literal string 'None'.
        columns = inspector.get_columns(table_name)
        info['tables'][table_name] = {
            'columns': {
                col['name']: {
                    'type': str(col['type']),
                    'nullable': col['nullable'],
                    'default': str(col.get('default', None)),
                    'autoincrement': col.get('autoincrement', False)
                }
                for col in columns
            },
            'column_count': len(columns)
        }

        # Primary key columns (empty list when the table has no PK).
        pk = inspector.get_pk_constraint(table_name)
        info['tables'][table_name]['primary_key'] = pk.get('constrained_columns', [])

        # Indexes for this table.
        indexes = inspector.get_indexes(table_name)
        info['indexes'][table_name] = [
            {
                'name': idx['name'],
                'columns': idx['column_names'],
                'unique': idx['unique']
            }
            for idx in indexes
        ]

        # Foreign keys for this table.
        fks = inspector.get_foreign_keys(table_name)
        info['foreign_keys'][table_name] = [
            {
                'name': fk.get('name'),
                'columns': fk['constrained_columns'],
                'referred_table': fk['referred_table'],
                'referred_columns': fk['referred_columns']
            }
            for fk in fks
        ]

    # Sequences and enum types (public schema only).
    with engine.connect() as conn:
        result = conn.execute(text("""
            SELECT sequence_name
            FROM information_schema.sequences
            WHERE sequence_schema = 'public'
        """))
        info['sequences'] = [row[0] for row in result]

        # Enum types with their labels in declared order.
        result = conn.execute(text("""
            SELECT t.typname as enum_name,
                   array_agg(e.enumlabel ORDER BY e.enumsortorder) as enum_values
            FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            WHERE t.typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'public')
            GROUP BY t.typname
        """))
        info['enums'] = {row[0]: row[1] for row in result}

    return info
||||
|
||||
def compare_tables(dev_info, prod_info):
    """Report table-level differences and return the set of shared tables.

    Prints tables that exist in only one of the two databases, then
    returns the intersection of table names so later checks can compare
    their contents.
    """
    tables_dev = set(dev_info['tables'])
    tables_prod = set(prod_info['tables'])

    print("\n" + "=" * 80)
    print("TABLE COMPARISON")
    print("=" * 80)

    # Present in DEV but missing from PROD.
    missing_from_prod = tables_dev - tables_prod
    if missing_from_prod:
        print(f"\n❌ Tables only in DEV ({len(missing_from_prod)}):")
        for name in sorted(missing_from_prod):
            print(f"  - {name}")

    # Present in PROD but missing from DEV.
    missing_from_dev = tables_prod - tables_dev
    if missing_from_dev:
        print(f"\n❌ Tables only in PROD ({len(missing_from_dev)}):")
        for name in sorted(missing_from_dev):
            print(f"  - {name}")

    shared = tables_dev & tables_prod
    print(f"\n✅ Common tables: {len(shared)}")

    return shared
def compare_columns(dev_info, prod_info, common_tables):
    """Compare column sets, types and nullability for shared tables.

    Args:
        dev_info / prod_info: schema dicts produced by get_db_info().
        common_tables: iterable of table names present in both databases.

    Returns:
        A list of human-readable issue strings; empty when all columns match.
    """
    print("\n" + "=" * 80)
    print("COLUMN COMPARISON")
    print("=" * 80)

    issues = []

    for table in sorted(common_tables):
        dev_cols = set(dev_info['tables'][table]['columns'].keys())
        prod_cols = set(prod_info['tables'][table]['columns'].keys())

        dev_only = dev_cols - prod_cols
        prod_only = prod_cols - dev_cols

        if dev_only or prod_only:
            print(f"\n⚠️  Table '{table}' has column differences:")

            if dev_only:
                # FIX: join sorted names so the recorded issue text is
                # deterministic (set iteration order is not).
                print(f"   Columns only in DEV: {', '.join(sorted(dev_only))}")
                issues.append(f"{table}: DEV-only columns: {', '.join(sorted(dev_only))}")

            if prod_only:
                print(f"   Columns only in PROD: {', '.join(sorted(prod_only))}")
                issues.append(f"{table}: PROD-only columns: {', '.join(sorted(prod_only))}")

        # Compare attributes of columns present on both sides.
        # FIX: iterate in sorted order so the printed report and the
        # issues list come out in a stable order across runs.
        for col in sorted(dev_cols & prod_cols):
            dev_col = dev_info['tables'][table]['columns'][col]
            prod_col = prod_info['tables'][table]['columns'][col]

            if dev_col['type'] != prod_col['type']:
                print(f"   ⚠️  Column '{col}' type mismatch:")
                print(f"      DEV:  {dev_col['type']}")
                print(f"      PROD: {prod_col['type']}")
                issues.append(f"{table}.{col}: Type mismatch")

            if dev_col['nullable'] != prod_col['nullable']:
                print(f"   ⚠️  Column '{col}' nullable mismatch:")
                print(f"      DEV:  {dev_col['nullable']}")
                print(f"      PROD: {prod_col['nullable']}")
                issues.append(f"{table}.{col}: Nullable mismatch")

    if not issues:
        print("\n✅ All columns match between DEV and PROD")

    return issues
def compare_enums(dev_info, prod_info):
    """Compare PostgreSQL enum types and their labels across databases.

    Returns a list of issue strings; empty when every enum matches.
    """
    print("\n" + "=" * 80)
    print("ENUM TYPE COMPARISON")
    print("=" * 80)

    enums_dev = set(dev_info['enums'])
    enums_prod = set(prod_info['enums'])

    only_dev = enums_dev - enums_prod
    only_prod = enums_prod - enums_dev

    issues = []

    if only_dev:
        print(f"\n❌ Enums only in DEV: {', '.join(sorted(only_dev))}")
        issues.extend(f"Enum '{e}' only in DEV" for e in only_dev)

    if only_prod:
        print(f"\n❌ Enums only in PROD: {', '.join(sorted(only_prod))}")
        issues.extend(f"Enum '{e}' only in PROD" for e in only_prod)

    # For enums defined on both sides, compare their label sets.
    for enum_name in sorted(enums_dev & enums_prod):
        labels_dev = set(dev_info['enums'][enum_name])
        labels_prod = set(prod_info['enums'][enum_name])

        if labels_dev != labels_prod:
            print(f"\n⚠️  Enum '{enum_name}' values differ:")
            print(f"   DEV:  {', '.join(sorted(labels_dev))}")
            print(f"   PROD: {', '.join(sorted(labels_prod))}")
            issues.append(f"Enum '{enum_name}' values differ")

    if not issues:
        print("\n✅ All enum types match")

    return issues
def check_migration_history(dev_engine, prod_engine):
    """Compare the Alembic head revision recorded in each database.

    Returns a one-element issue list when the versions differ (or the
    check itself fails), and an empty list when they match.
    """
    print("\n" + "=" * 80)
    print("MIGRATION HISTORY")
    print("=" * 80)

    def _current_version(engine):
        # alembic_version holds at most one row: the applied head revision.
        with engine.connect() as conn:
            row = conn.execute(text("SELECT version_num FROM alembic_version")).fetchone()
        return row[0] if row else None

    try:
        dev_version = _current_version(dev_engine)
        prod_version = _current_version(prod_engine)

        print(f"\nDEV migration version: {dev_version}")
        print(f"PROD migration version: {prod_version}")

        if dev_version == prod_version:
            print("✅ Migration versions match")
            return []

        print("❌ Migration versions DO NOT match")
        return ["Migration versions differ"]

    except Exception as e:
        # e.g. the alembic_version table does not exist yet.
        print(f"⚠️  Could not check migration history: {str(e)}")
        return [f"Migration check failed: {str(e)}"]
def get_row_counts(engine, tables):
    """Return ``{table_name: row_count}`` for every table in *tables*.

    Table names come from SQLAlchemy's inspector (trusted, not user
    input), but they are interpolated into SQL, so quote them:
    otherwise mixed-case or reserved-word PostgreSQL table names would
    fail or resolve to the wrong relation.
    """
    counts = {}
    with engine.connect() as conn:
        for table in tables:
            # Identifiers cannot be bound parameters; quote the name instead.
            result = conn.execute(text(f'SELECT COUNT(*) FROM "{table}"'))
            counts[table] = result.fetchone()[0]
    return counts
def compare_data_counts(dev_engine, prod_engine, common_tables):
    """Print a side-by-side row-count report for every shared table.

    Informational only: differences are flagged visually but nothing is
    returned and no issue list is produced.
    """
    print("\n" + "=" * 80)
    print("DATA ROW COUNTS")
    print("=" * 80)

    print("\nGetting DEV row counts...")
    dev_counts = get_row_counts(dev_engine, common_tables)

    print("Getting PROD row counts...")
    prod_counts = get_row_counts(prod_engine, common_tables)

    print(f"\n{'Table':<30} {'DEV':<15} {'PROD':<15} {'Diff':<15}")
    print("-" * 75)

    for table in sorted(common_tables):
        n_dev = dev_counts[table]
        n_prod = prod_counts[table]
        delta = n_dev - n_prod
        # Show an explicit sign for surpluses on the DEV side.
        delta_str = f"+{delta}" if delta > 0 else str(delta)
        marker = "✅" if delta == 0 else "⚠️ "
        print(f"{marker} {table:<28} {n_dev:<15} {n_prod:<15} {delta_str:<15}")
def _mask_credentials(url):
    """Return the part of a DB URL after the credentials, for safe display.

    FIX: the previous code used ``url.split('@')[1]`` unconditionally,
    which raises IndexError for URLs without embedded credentials; fall
    back to the unchanged URL in that case.
    """
    return url.split('@', 1)[1] if '@' in url else url


def main():
    """Run every DEV-vs-PROD integrity check and exit 0 (in sync) or 1."""
    print("\n" + "="*80)
    print("DATABASE INTEGRITY CHECKER")
    print("="*80)
    # Hide passwords when echoing the connection targets.
    print(f"\nDEV: {_mask_credentials(DEV_DB)}")
    print(f"PROD: {_mask_credentials(PROD_DB)}")

    try:
        # Connect to databases
        print("\n🔌 Connecting to databases...")
        dev_engine = create_engine(DEV_DB)
        prod_engine = create_engine(PROD_DB)

        # Fail fast with a clear error if either database is unreachable.
        with dev_engine.connect() as conn:
            conn.execute(text("SELECT 1"))
        print("✅ Connected to DEV database")

        with prod_engine.connect() as conn:
            conn.execute(text("SELECT 1"))
        print("✅ Connected to PROD database")

        # Gather full schema snapshots of both sides.
        print("\n📊 Gathering database information...")
        dev_info = get_db_info(dev_engine, "DEV")
        prod_info = get_db_info(prod_engine, "PROD")

        # Run comparisons; accumulate every issue for the summary.
        all_issues = []

        common_tables = compare_tables(dev_info, prod_info)

        all_issues.extend(compare_columns(dev_info, prod_info, common_tables))
        all_issues.extend(compare_enums(dev_info, prod_info))
        all_issues.extend(check_migration_history(dev_engine, prod_engine))

        # Row counts are informational only; they do not affect the exit code.
        compare_data_counts(dev_engine, prod_engine, common_tables)

        # Summary
        print("\n" + "="*80)
        print("SUMMARY")
        print("="*80)

        if all_issues:
            print(f"\n❌ Found {len(all_issues)} integrity issues:")
            for i, issue in enumerate(all_issues, 1):
                print(f"  {i}. {issue}")
            print("\n⚠️  Databases are NOT in sync!")
            sys.exit(1)
        else:
            print("\n✅ Databases are in sync!")
            print("✅ No integrity issues found")
            sys.exit(0)

    except Exception as e:
        # SystemExit from the branches above is not caught here.
        print(f"\n❌ Error: {str(e)}")
        import traceback
        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    main()
|
||||
238
check_db_status.py
Executable file
238
check_db_status.py
Executable file
@@ -0,0 +1,238 @@
|
||||
#!/usr/bin/env python3
"""
Database Migration Status Checker
Checks what migration steps have been completed and what's missing
"""
import sys
import os
from sqlalchemy import create_engine, text, inspect
from sqlalchemy.orm import sessionmaker
from dotenv import load_dotenv

# Load environment variables
load_dotenv()

# Get database URL from environment or use provided one
DATABASE_URL = os.getenv('DATABASE_URL')

if not DATABASE_URL:
    print("ERROR: DATABASE_URL not found in environment")
    sys.exit(1)

# Create database connection
engine = create_engine(DATABASE_URL)
Session = sessionmaker(bind=engine)
db = Session()
inspector = inspect(engine)

print("=" * 80)
print("DATABASE MIGRATION STATUS CHECKER")
print("=" * 80)
print(f"\nConnected to: {DATABASE_URL.split('@')[1] if '@' in DATABASE_URL else 'database'}")
print()

# ANSI escape sequences for coloured terminal output
class Colors:
    GREEN = '\033[92m'
    RED = '\033[91m'
    YELLOW = '\033[93m'
    BLUE = '\033[94m'
    END = '\033[0m'

def check_mark(exists):
    """Green check when *exists* is truthy, red cross otherwise."""
    return f"{Colors.GREEN}✓{Colors.END}" if exists else f"{Colors.RED}✗{Colors.END}"

def warning(exists):
    """Yellow warning when *exists* is truthy, green check otherwise."""
    return f"{Colors.YELLOW}⚠{Colors.END}" if exists else f"{Colors.GREEN}✓{Colors.END}"

issues = []
warnings = []

# FIX: defaults for values that are only assigned when their prerequisite
# check actually runs. Without these, the summary section at the bottom
# raised NameError whenever a check was skipped (e.g. roles table missing).
system_roles_count = 0
unmigrated_users = 0
unmigrated_perms = 0

# Table inventory, fetched once and reused by the checks below.
existing_tables = set(inspector.get_table_names())

# ============================================================
# Check 1: Does roles table exist?
# ============================================================
print(f"{Colors.BLUE}[1] Checking if 'roles' table exists...{Colors.END}")
roles_table_exists = 'roles' in existing_tables
print(f"  {check_mark(roles_table_exists)} roles table exists")

if not roles_table_exists:
    issues.append("❌ MISSING: 'roles' table - run migration 006_add_dynamic_roles.sql")
    print(f"\n{Colors.RED}ISSUE: roles table not found!{Colors.END}")
    print(f"  Action: Run 'psql $DATABASE_URL -f migrations/006_add_dynamic_roles.sql'")

# ============================================================
# Check 2: Does users table have role_id column?
# ============================================================
print(f"\n{Colors.BLUE}[2] Checking if 'users' table has 'role_id' column...{Colors.END}")
# FIX: guard against the table itself being absent — get_columns() raises
# for a missing table, which previously crashed the whole checker.
users_columns = [col['name'] for col in inspector.get_columns('users')] if 'users' in existing_tables else []
users_has_role_id = 'role_id' in users_columns
print(f"  {check_mark(users_has_role_id)} users.role_id column exists")

if not users_has_role_id:
    issues.append("❌ MISSING: 'users.role_id' column - run migration 006_add_dynamic_roles.sql")
    print(f"\n{Colors.RED}ISSUE: users.role_id column not found!{Colors.END}")
    print(f"  Action: Run 'psql $DATABASE_URL -f migrations/006_add_dynamic_roles.sql'")

# ============================================================
# Check 3: Does role_permissions table have role_id column?
# ============================================================
print(f"\n{Colors.BLUE}[3] Checking if 'role_permissions' table has 'role_id' column...{Colors.END}")
rp_columns = [col['name'] for col in inspector.get_columns('role_permissions')] if 'role_permissions' in existing_tables else []
rp_has_role_id = 'role_id' in rp_columns
print(f"  {check_mark(rp_has_role_id)} role_permissions.role_id column exists")

if not rp_has_role_id:
    issues.append("❌ MISSING: 'role_permissions.role_id' column - run migration 006_add_dynamic_roles.sql")
    print(f"\n{Colors.RED}ISSUE: role_permissions.role_id column not found!{Colors.END}")
    print(f"  Action: Run 'psql $DATABASE_URL -f migrations/006_add_dynamic_roles.sql'")

# ============================================================
# Check 4: Are system roles seeded?
# ============================================================
if roles_table_exists:
    print(f"\n{Colors.BLUE}[4] Checking if system roles are seeded...{Colors.END}")
    result = db.execute(text("SELECT COUNT(*) as count FROM roles WHERE is_system_role = true"))
    system_roles_count = result.scalar()
    print(f"  System roles found: {system_roles_count}")

    if system_roles_count == 0:
        issues.append("❌ MISSING: System roles not seeded - run roles_seed.py")
        print(f"  {Colors.RED}✗ No system roles found!{Colors.END}")
        print(f"  Action: Run 'python3 roles_seed.py'")
    elif system_roles_count < 5:
        warnings.append(f"⚠️  WARNING: Expected 5 system roles, found {system_roles_count}")
        print(f"  {Colors.YELLOW}⚠{Colors.END} Expected 5 roles, found {system_roles_count}")
        print(f"  Action: Run 'python3 roles_seed.py' to ensure all roles exist")
    else:
        print(f"  {Colors.GREEN}✓{Colors.END} All system roles seeded")

    # Show which roles exist
    result = db.execute(text("SELECT code, name FROM roles WHERE is_system_role = true ORDER BY code"))
    existing_roles = result.fetchall()
    if existing_roles:
        print(f"\n  Existing roles:")
        for role in existing_roles:
            print(f"    - {role[0]}: {role[1]}")

# ============================================================
# Check 5: Are users migrated to dynamic roles?
# ============================================================
if users_has_role_id and roles_table_exists:
    print(f"\n{Colors.BLUE}[5] Checking if users are migrated to dynamic roles...{Colors.END}")

    # Count total users
    result = db.execute(text("SELECT COUNT(*) FROM users"))
    total_users = result.scalar()
    print(f"  Total users: {total_users}")

    # Count users with role_id set
    result = db.execute(text("SELECT COUNT(*) FROM users WHERE role_id IS NOT NULL"))
    migrated_users = result.scalar()
    print(f"  Migrated users (with role_id): {migrated_users}")

    # Count users without role_id
    unmigrated_users = total_users - migrated_users

    if unmigrated_users > 0:
        issues.append(f"❌ INCOMPLETE: {unmigrated_users} users not migrated to dynamic roles")
        print(f"  {Colors.RED}✗ {unmigrated_users} users still need migration!{Colors.END}")
        print(f"  Action: Run 'python3 migrate_users_to_dynamic_roles.py'")

        # Show sample unmigrated users
        result = db.execute(text("""
            SELECT email, role FROM users
            WHERE role_id IS NULL
            LIMIT 5
        """))
        unmigrated = result.fetchall()
        if unmigrated:
            print(f"\n  Sample unmigrated users:")
            for user in unmigrated:
                print(f"    - {user[0]} (role: {user[1]})")
    else:
        print(f"  {Colors.GREEN}✓{Colors.END} All users migrated to dynamic roles")

# ============================================================
# Check 6: Are role permissions migrated?
# ============================================================
if rp_has_role_id and roles_table_exists:
    print(f"\n{Colors.BLUE}[6] Checking if role permissions are migrated...{Colors.END}")

    # Count total role_permissions
    result = db.execute(text("SELECT COUNT(*) FROM role_permissions"))
    total_perms = result.scalar()
    print(f"  Total role_permissions: {total_perms}")

    # Count permissions with role_id set
    result = db.execute(text("SELECT COUNT(*) FROM role_permissions WHERE role_id IS NOT NULL"))
    migrated_perms = result.scalar()
    print(f"  Migrated permissions (with role_id): {migrated_perms}")

    unmigrated_perms = total_perms - migrated_perms

    if unmigrated_perms > 0:
        issues.append(f"❌ INCOMPLETE: {unmigrated_perms} permissions not migrated to dynamic roles")
        print(f"  {Colors.RED}✗ {unmigrated_perms} permissions still need migration!{Colors.END}")
        print(f"  Action: Run 'python3 migrate_role_permissions_to_dynamic_roles.py'")
    else:
        print(f"  {Colors.GREEN}✓{Colors.END} All permissions migrated to dynamic roles")

# ============================================================
# Check 7: Verify admin account
# ============================================================
print(f"\n{Colors.BLUE}[7] Checking admin account...{Colors.END}")
result = db.execute(text("""
    SELECT email, role, role_id
    FROM users
    WHERE email LIKE '%admin%' OR role = 'admin' OR role = 'superadmin'
    LIMIT 5
"""))
admin_users = result.fetchall()

if admin_users:
    print(f"  Found {len(admin_users)} admin/superadmin users:")
    for user in admin_users:
        role_id_status = "✓" if user[2] else "✗"
        print(f"    {role_id_status} {user[0]} (role: {user[1]}, role_id: {user[2] or 'NULL'})")
else:
    warnings.append("⚠️  WARNING: No admin users found")
    print(f"  {Colors.YELLOW}⚠{Colors.END} No admin users found in database")

# ============================================================
# Summary
# ============================================================
print("\n" + "=" * 80)
print("SUMMARY")
print("=" * 80)

if not issues and not warnings:
    print(f"\n{Colors.GREEN}✓ All migration steps completed successfully!{Colors.END}")
    print("\nNext steps:")
    print("  1. Deploy latest backend code")
    print("  2. Restart backend server")
    print("  3. Test /api/admin/users/export endpoint")
else:
    if issues:
        print(f"\n{Colors.RED}ISSUES FOUND ({len(issues)}):{Colors.END}")
        for i, issue in enumerate(issues, 1):
            print(f"  {i}. {issue}")

    if warnings:
        print(f"\n{Colors.YELLOW}WARNINGS ({len(warnings)}):{Colors.END}")
        # FIX: loop variable renamed from 'warning', which shadowed the
        # warning() helper defined above.
        for i, warn in enumerate(warnings, 1):
            print(f"  {i}. {warn}")

    print(f"\n{Colors.BLUE}RECOMMENDED ACTIONS:{Colors.END}")
    if not roles_table_exists or not users_has_role_id or not rp_has_role_id:
        print("  1. Run: psql $DATABASE_URL -f migrations/006_add_dynamic_roles.sql")
    if roles_table_exists and system_roles_count == 0:
        print("  2. Run: python3 roles_seed.py")
    if unmigrated_users > 0:
        print("  3. Run: python3 migrate_users_to_dynamic_roles.py")
    if unmigrated_perms > 0:
        print("  4. Run: python3 migrate_role_permissions_to_dynamic_roles.py")
    print("  5. Deploy latest backend code and restart server")

print("\n" + "=" * 80)

db.close()
|
||||
37
check_permissions.py
Normal file
37
check_permissions.py
Normal file
@@ -0,0 +1,37 @@
|
||||
#!/usr/bin/env python3
"""
Check permissions table status
"""
import sys
import os
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from dotenv import load_dotenv

load_dotenv()
DATABASE_URL = os.getenv('DATABASE_URL')

# FIX: fail fast with a clear message instead of letting
# create_engine(None) raise an opaque error; this also matches the
# guard used by the sibling check_* scripts.
if not DATABASE_URL:
    print("ERROR: DATABASE_URL not found in environment")
    sys.exit(1)

engine = create_engine(DATABASE_URL)
Session = sessionmaker(bind=engine)
db = Session()

try:
    print("Checking permissions table...")
    print("=" * 80)

    # Check if permissions table exists
    result = db.execute(text("SELECT COUNT(*) FROM permissions"))
    count = result.scalar()

    print(f"Total permissions in database: {count}")

    if count > 0:
        print("\nSample permissions:")
        result = db.execute(text("SELECT code, name, module FROM permissions LIMIT 10"))
        for perm in result.fetchall():
            print(f"  - {perm[0]}: {perm[1]} (module: {perm[2]})")
    else:
        print("\n⚠️  WARNING: Permissions table is EMPTY!")
        print("\nThis will cause permission checks to fail.")
        print("\nAction needed: Run 'python3 seed_permissions.py'")
finally:
    # FIX: release the session even when a query raises (previously the
    # session leaked on any error before db.close()).
    db.close()
|
||||
58
check_schema_mismatches.py
Normal file
58
check_schema_mismatches.py
Normal file
@@ -0,0 +1,58 @@
|
||||
#!/usr/bin/env python3
"""
Check for schema mismatches between models.py and database
"""
import os
from sqlalchemy import create_engine, inspect
from dotenv import load_dotenv
from models import Base

load_dotenv()

# Connect to database and reflect its live schema.
engine = create_engine(os.getenv('DATABASE_URL'))
inspector = inspect(engine)

print("=" * 80)
print("SCHEMA MISMATCH DETECTION")
print("=" * 80)

# Collected "model vs database" discrepancies, printed at the end.
mismatches = []

# Check each model: walk every table declared on the SQLAlchemy models
# and diff its column set against what actually exists in the database.
for table_name, table in Base.metadata.tables.items():
    print(f"\n📋 Checking table: {table_name}")

    # Get columns from database; get_columns raises when the table is
    # missing, which we treat as a mismatch rather than a fatal error.
    try:
        db_columns = {col['name'] for col in inspector.get_columns(table_name)}
    except Exception as e:
        print(f"  ❌ Table doesn't exist in database: {e}")
        mismatches.append(f"{table_name}: Table missing in database")
        continue

    # Get columns from model
    model_columns = {col.name for col in table.columns}

    # Find missing columns (both directions of the diff).
    missing_in_db = model_columns - db_columns
    extra_in_db = db_columns - model_columns

    # Columns the model declares but the database lacks — a real problem.
    if missing_in_db:
        print(f"  ⚠️  Missing in DATABASE: {missing_in_db}")
        mismatches.append(f"{table_name}: Missing in DB: {missing_in_db}")

    # Extra database columns are reported but deliberately NOT counted
    # as mismatches (they don't break the application).
    if extra_in_db:
        print(f"  ℹ️  Extra in DATABASE (not in model): {extra_in_db}")

    if not missing_in_db and not extra_in_db:
        print(f"  ✅ Schema matches!")

print("\n" + "=" * 80)
if mismatches:
    print(f"❌ FOUND {len(mismatches)} MISMATCHES:")
    for mismatch in mismatches:
        print(f"  - {mismatch}")
else:
    print("✅ ALL SCHEMAS MATCH!")
print("=" * 80)
||||
17
clear_permissions.sql
Normal file
17
clear_permissions.sql
Normal file
@@ -0,0 +1,17 @@
|
||||
-- Clear all permissions and role_permissions
-- Run this BEFORE running seed_permissions_rbac.py

BEGIN;

-- Delete all role_permission mappings
-- NOTE(review): deleted first — presumably role_permissions has a FK to
-- permissions, so child rows must go before their parents; confirm schema.
DELETE FROM role_permissions;

-- Delete all permissions
DELETE FROM permissions;

COMMIT;

-- Verify they're empty (both counts should report 0)
SELECT 'Permissions cleared:' as message, COUNT(*) as count FROM permissions
UNION ALL
SELECT 'Role-permissions cleared:', COUNT(*) FROM role_permissions;
|
||||
220
create_admin.py
220
create_admin.py
@@ -1,73 +1,203 @@
|
||||
"""
|
||||
Create an admin user for testing.
|
||||
Run this script to add an admin account to your database.
|
||||
Create a superadmin user interactively.
|
||||
Run this script to add a superadmin account to your database.
|
||||
"""
|
||||
|
||||
import getpass
|
||||
import re
|
||||
from database import SessionLocal
|
||||
from models import User, UserStatus, UserRole
|
||||
from models import User, UserStatus, UserRole, Role
|
||||
from auth import get_password_hash
|
||||
from datetime import datetime, timezone
|
||||
import sys
|
||||
|
||||
def create_admin():
|
||||
"""Create an admin user"""
|
||||
def validate_email(email):
|
||||
"""Validate email format"""
|
||||
pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
|
||||
return re.match(pattern, email) is not None
|
||||
|
||||
def validate_phone(phone):
|
||||
"""Validate phone format (simple check)"""
|
||||
# Remove common separators
|
||||
cleaned = phone.replace('-', '').replace('(', '').replace(')', '').replace(' ', '').replace('.', '')
|
||||
return len(cleaned) >= 10 and cleaned.isdigit()
|
||||
|
||||
def validate_zipcode(zipcode):
|
||||
"""Validate US zipcode format"""
|
||||
return len(zipcode) == 5 and zipcode.isdigit()
|
||||
|
||||
def get_input(prompt, validator=None, required=True, default=None):
|
||||
"""Get user input with optional validation"""
|
||||
while True:
|
||||
if default:
|
||||
user_input = input(f"{prompt} [{default}]: ").strip()
|
||||
if not user_input:
|
||||
return default
|
||||
else:
|
||||
user_input = input(f"{prompt}: ").strip()
|
||||
|
||||
if not user_input and not required:
|
||||
return None
|
||||
|
||||
if not user_input and required:
|
||||
print("❌ This field is required. Please try again.")
|
||||
continue
|
||||
|
||||
if validator and not validator(user_input):
|
||||
print("❌ Invalid format. Please try again.")
|
||||
continue
|
||||
|
||||
return user_input
|
||||
|
||||
def get_password():
|
||||
"""Get password with confirmation and validation"""
|
||||
while True:
|
||||
password = getpass.getpass("Password (min 8 characters): ")
|
||||
|
||||
if len(password) < 8:
|
||||
print("❌ Password must be at least 8 characters long.")
|
||||
continue
|
||||
|
||||
confirm = getpass.getpass("Confirm password: ")
|
||||
|
||||
if password != confirm:
|
||||
print("❌ Passwords do not match. Please try again.")
|
||||
continue
|
||||
|
||||
return password
|
||||
|
||||
def create_superadmin():
|
||||
"""Create a superadmin user interactively"""
|
||||
db = SessionLocal()
|
||||
|
||||
try:
|
||||
# Check if admin already exists
|
||||
existing_admin = db.query(User).filter(
|
||||
User.email == "admin@loaf.org"
|
||||
).first()
|
||||
print("\n" + "="*60)
|
||||
print("🔧 LOAF Membership Platform - Superadmin Creation")
|
||||
print("="*60 + "\n")
|
||||
|
||||
if existing_admin:
|
||||
print(f"⚠️ Admin user already exists: {existing_admin.email}")
|
||||
print(f" Role: {existing_admin.role.value}")
|
||||
print(f" Status: {existing_admin.status.value}")
|
||||
# Get user information interactively
|
||||
print("📝 Please provide the superadmin account details:\n")
|
||||
|
||||
email = get_input(
|
||||
"Email address",
|
||||
validator=validate_email,
|
||||
required=True
|
||||
)
|
||||
|
||||
# Check if user already exists
|
||||
existing_user = db.query(User).filter(User.email == email).first()
|
||||
|
||||
if existing_user:
|
||||
print(f"\n⚠️ User with email '{email}' already exists!")
|
||||
print(f" Current Role: {existing_user.role.value}")
|
||||
print(f" Current Status: {existing_user.status.value}")
|
||||
|
||||
update = input("\n❓ Would you like to update this user to superadmin? (yes/no): ").strip().lower()
|
||||
|
||||
if update in ['yes', 'y']:
|
||||
existing_user.role = UserRole.superadmin
|
||||
existing_user.status = UserStatus.active
|
||||
existing_user.email_verified = True
|
||||
|
||||
# Assign superadmin role in dynamic RBAC if roles table exists
|
||||
try:
|
||||
superadmin_role = db.query(Role).filter(Role.code == 'superadmin').first()
|
||||
if superadmin_role and not existing_user.role_id:
|
||||
existing_user.role_id = superadmin_role.id
|
||||
except Exception:
|
||||
pass # Roles table might not exist yet
|
||||
|
||||
# Update to admin role if not already
|
||||
if existing_admin.role != UserRole.admin:
|
||||
existing_admin.role = UserRole.admin
|
||||
existing_admin.status = UserStatus.active
|
||||
existing_admin.email_verified = True
|
||||
db.commit()
|
||||
print("✅ Updated existing user to admin role")
|
||||
print("✅ User updated to superadmin successfully!")
|
||||
print(f" Email: {existing_user.email}")
|
||||
print(f" Role: {existing_user.role.value}")
|
||||
print(f" User ID: {existing_user.id}")
|
||||
else:
|
||||
print("❌ Operation cancelled.")
|
||||
return
|
||||
|
||||
print("Creating admin user...")
|
||||
password = get_password()
|
||||
|
||||
# Create admin user
|
||||
admin_user = User(
|
||||
email="admin@loaf.org",
|
||||
password_hash=get_password_hash("admin123"), # Change this password!
|
||||
first_name="Admin",
|
||||
last_name="User",
|
||||
phone="555-0001",
|
||||
address="123 Admin Street",
|
||||
city="Admin City",
|
||||
state="CA",
|
||||
zipcode="90001",
|
||||
date_of_birth=datetime(1990, 1, 1),
|
||||
print("\n👤 Personal Information:\n")
|
||||
|
||||
first_name = get_input("First name", required=True)
|
||||
last_name = get_input("Last name", required=True)
|
||||
phone = get_input("Phone number", validator=validate_phone, required=True)
|
||||
|
||||
print("\n📍 Address Information:\n")
|
||||
|
||||
address = get_input("Street address", required=True)
|
||||
city = get_input("City", required=True)
|
||||
state = get_input("State (2-letter code)", required=True, default="CA")
|
||||
zipcode = get_input("ZIP code", validator=validate_zipcode, required=True)
|
||||
|
||||
print("\n📅 Date of Birth (YYYY-MM-DD format):\n")
|
||||
|
||||
while True:
|
||||
dob_str = get_input("Date of birth (e.g., 1990-01-15)", required=True)
|
||||
try:
|
||||
date_of_birth = datetime.strptime(dob_str, "%Y-%m-%d")
|
||||
break
|
||||
except ValueError:
|
||||
print("❌ Invalid date format. Please use YYYY-MM-DD format.")
|
||||
|
||||
# Create superadmin user
|
||||
print("\n⏳ Creating superadmin user...")
|
||||
|
||||
superadmin_user = User(
|
||||
email=email,
|
||||
password_hash=get_password_hash(password),
|
||||
first_name=first_name,
|
||||
last_name=last_name,
|
||||
phone=phone,
|
||||
address=address,
|
||||
city=city,
|
||||
state=state.upper(),
|
||||
zipcode=zipcode,
|
||||
date_of_birth=date_of_birth,
|
||||
status=UserStatus.active,
|
||||
role=UserRole.admin,
|
||||
role=UserRole.superadmin,
|
||||
email_verified=True,
|
||||
newsletter_subscribed=False
|
||||
)
|
||||
|
||||
db.add(admin_user)
|
||||
db.commit()
|
||||
db.refresh(admin_user)
|
||||
|
||||
print("✅ Admin user created successfully!")
|
||||
print(f" Email: admin@loaf.org")
|
||||
print(f" Password: admin123")
|
||||
print(f" Role: {admin_user.role.value}")
|
||||
print(f" User ID: {admin_user.id}")
|
||||
print("\n⚠️ IMPORTANT: Change the password after first login!")
|
||||
db.add(superadmin_user)
|
||||
db.flush() # Flush to get the user ID before looking up roles
|
||||
|
||||
# Assign superadmin role in dynamic RBAC if roles table exists
|
||||
try:
|
||||
superadmin_role = db.query(Role).filter(Role.code == 'superadmin').first()
|
||||
if superadmin_role:
|
||||
superadmin_user.role_id = superadmin_role.id
|
||||
print(" ✓ Assigned dynamic superadmin role")
|
||||
except Exception as e:
|
||||
print(f"❌ Error creating admin user: {e}")
|
||||
print(f" ⚠️ Dynamic roles not yet set up (this is normal for fresh installs)")
|
||||
|
||||
db.commit()
|
||||
db.refresh(superadmin_user)
|
||||
|
||||
print("\n" + "="*60)
|
||||
print("✅ Superadmin user created successfully!")
|
||||
print("="*60)
|
||||
print(f"\n📧 Email: {superadmin_user.email}")
|
||||
print(f"👤 Name: {superadmin_user.first_name} {superadmin_user.last_name}")
|
||||
print(f"🔑 Role: {superadmin_user.role.value}")
|
||||
print(f"🆔 User ID: {superadmin_user.id}")
|
||||
print(f"\n✨ You can now log in to the admin panel at /admin/login")
|
||||
print("\n" + "="*60 + "\n")
|
||||
|
||||
except KeyboardInterrupt:
|
||||
print("\n\n❌ Operation cancelled by user.")
|
||||
db.rollback()
|
||||
sys.exit(1)
|
||||
except Exception as e:
|
||||
print(f"\n❌ Error creating superadmin user: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
db.rollback()
|
||||
sys.exit(1)
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
# Script entry point: creates the default admin account and then runs the
# interactive superadmin creation in one pass.
if __name__ == "__main__":
    create_admin()
    create_superadmin()
|
||||
|
||||
146
create_superadmin.py
Normal file
146
create_superadmin.py
Normal file
@@ -0,0 +1,146 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Create Superadmin User Script
|
||||
Directly creates a superadmin user in the database for LOAF membership platform
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from getpass import getpass
|
||||
|
||||
# Add the backend directory to path for imports
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
def main():
    """Interactively collect credentials and insert a superadmin user row.

    Reads DATABASE_URL from the environment (or .env), prompts for email,
    name and password, then inserts the row directly with raw SQL.
    Exits with status 1 on any validation or database failure.
    """
    banner = "=" * 70
    print(banner)
    print("LOAF Membership Platform - Superadmin User Creator")
    print(banner)
    print()

    # The script talks to the database directly, so DATABASE_URL is mandatory.
    from dotenv import load_dotenv
    load_dotenv()

    database_url = os.getenv("DATABASE_URL")
    if not database_url:
        print("❌ DATABASE_URL not found in environment or .env file")
        sys.exit(1)

    # --- Collect and validate operator input -------------------------------
    email = input("Email address: ").strip()
    if not email or '@' not in email:
        print("❌ Invalid email address")
        sys.exit(1)

    first_name = input("First name: ").strip()
    if not first_name:
        print("❌ First name is required")
        sys.exit(1)

    last_name = input("Last name: ").strip()
    if not last_name:
        print("❌ Last name is required")
        sys.exit(1)

    # getpass keeps the password out of terminal echo and shell history.
    password = getpass("Password: ")
    if len(password) < 8:
        print("❌ Password must be at least 8 characters")
        sys.exit(1)

    password_confirm = getpass("Confirm password: ")
    if password != password_confirm:
        print("❌ Passwords do not match")
        sys.exit(1)

    print()
    print("Creating superadmin user...")

    try:
        # Heavy dependencies are imported lazily so the prompts above work
        # even before the environment is fully provisioned.
        from sqlalchemy import create_engine, text
        from passlib.context import CryptContext

        # Same bcrypt scheme the application uses to verify logins.
        pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
        password_hash = pwd_context.hash(password)

        engine = create_engine(database_url)

        with engine.connect() as conn:
            # Refuse to clobber an existing account with the same email.
            existing = conn.execute(
                text("SELECT id FROM users WHERE email = :email"),
                {"email": email},
            )
            if existing.fetchone():
                print(f"❌ User with email '{email}' already exists")
                sys.exit(1)

            # Insert the superadmin row; optional profile columns are left
            # blank and a placeholder date_of_birth is used.
            conn.execute(
                text("""
                    INSERT INTO users (
                        id, email, password_hash, first_name, last_name,
                        phone, address, city, state, zipcode, date_of_birth,
                        status, role, email_verified,
                        newsletter_subscribed, accepts_tos,
                        created_at, updated_at
                    ) VALUES (
                        gen_random_uuid(),
                        :email,
                        :password_hash,
                        :first_name,
                        :last_name,
                        '',
                        '',
                        '',
                        '',
                        '',
                        '1990-01-01',
                        'active',
                        'superadmin',
                        true,
                        false,
                        true,
                        NOW(),
                        NOW()
                    )
                """),
                {
                    "email": email,
                    "password_hash": password_hash,
                    "first_name": first_name,
                    "last_name": last_name,
                },
            )
            conn.commit()

            print()
            print(banner)
            print("✅ Superadmin user created successfully!")
            print(banner)
            print()
            print(f"   Email: {email}")
            print(f"   Name: {first_name} {last_name}")
            print(f"   Role: superadmin")
            print(f"   Status: active")
            print()
            print("You can now log in with these credentials.")
            print(banner)

    except ImportError as e:
        print(f"❌ Missing dependency: {e}")
        print("   Run: pip install sqlalchemy psycopg2-binary passlib python-dotenv")
        sys.exit(1)
    except Exception as e:
        print(f"❌ Database error: {e}")
        sys.exit(1)
|
||||
|
||||
if __name__ == "__main__":
    # Ctrl-C should exit cleanly with a message instead of a traceback.
    try:
        main()
    except KeyboardInterrupt:
        print("\n\n❌ Cancelled by user")
        sys.exit(1)
|
||||
17
database.py
17
database.py
@@ -1,6 +1,7 @@
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy.pool import QueuePool
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
from pathlib import Path
|
||||
@@ -10,7 +11,21 @@ load_dotenv(ROOT_DIR / '.env')
|
||||
|
||||
# Database connection string; falls back to a local development default.
DATABASE_URL = os.environ.get('DATABASE_URL', 'postgresql://user:password@localhost:5432/membership_db')

# Configure engine with connection pooling and connection health checks.
# FIX: a previous revision first built a throwaway default engine
# (`engine = create_engine(DATABASE_URL)`) and immediately rebound the name,
# allocating a pool that was never used; only the pooled engine is created now.
engine = create_engine(
    DATABASE_URL,
    poolclass=QueuePool,
    pool_size=5,          # Keep 5 connections open
    max_overflow=10,      # Allow up to 10 extra connections during peak
    pool_pre_ping=True,   # CRITICAL: Test connections before using them
    pool_recycle=3600,    # Recycle connections every hour (prevents stale connections)
    echo=False,           # Set to True for SQL debugging
    connect_args={
        # NOTE(review): these connect_args are psycopg2/PostgreSQL specific —
        # confirm the driver if DATABASE_URL ever points at another backend.
        'connect_timeout': 10,                   # Timeout connection attempts after 10 seconds
        'options': '-c statement_timeout=30000'  # 30 second query timeout
    }
)

# Session factory: explicit commit/flush, bound to the pooled engine.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Declarative base class for all ORM models.
Base = declarative_base()
|
||||
|
||||
71
deploy_rbac.sh
Executable file
71
deploy_rbac.sh
Executable file
@@ -0,0 +1,71 @@
|
||||
#!/bin/bash
# Quick deployment script for RBAC system
# Run this on your dev server after pulling latest code

set -e  # Exit on any error

echo "========================================"
echo "RBAC System Deployment Script"
echo "========================================"
echo ""

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Check if .env exists
if [ ! -f .env ]; then
    echo -e "${RED}Error: .env file not found${NC}"
    echo "Please create .env file with DATABASE_URL"
    exit 1
fi

# Load environment variables
source .env

# Run one deployment step and report the outcome.
# BUG FIX: the previous pattern (`cmd` followed by a function checking $?)
# could never report a failure — `set -e` aborts the script before the check
# runs. Executing the command inside `if` suspends `set -e` for it, so we can
# print a proper error message before exiting.
run_step() {
    local label="$1"
    shift
    if "$@"; then
        echo -e "${GREEN}✓ ${label}${NC}"
    else
        echo -e "${RED}✗ ${label} failed${NC}"
        exit 1
    fi
}

echo -e "${YELLOW}Step 1: Running schema migration...${NC}"
# Quote DATABASE_URL: connection strings can contain characters the shell splits on.
run_step "Schema migration" psql "$DATABASE_URL" -f migrations/006_add_dynamic_roles.sql

echo ""
echo -e "${YELLOW}Step 2: Seeding system roles...${NC}"
run_step "Roles seeding" python3 roles_seed.py

echo ""
echo -e "${YELLOW}Step 3: Migrating users to dynamic roles...${NC}"
run_step "User migration" python3 migrate_users_to_dynamic_roles.py

echo ""
echo -e "${YELLOW}Step 4: Migrating role permissions...${NC}"
run_step "Role permissions migration" python3 migrate_role_permissions_to_dynamic_roles.py

echo ""
echo -e "${YELLOW}Step 5: Verifying admin account...${NC}"
run_step "Admin account verification" python3 verify_admin_account.py

echo ""
echo "========================================"
echo -e "${GREEN}✓ RBAC System Deployment Complete!${NC}"
echo "========================================"
echo ""
echo "Next steps:"
echo "1. Restart your backend server"
echo "2. Test the /api/admin/users/export endpoint"
echo "3. Login as admin and check /admin/permissions page"
echo ""
|
||||
@@ -1,15 +0,0 @@
|
||||
|
||||
|
||||
services:
|
||||
backend:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile # Use Dockerfile.prod for production
|
||||
ports:
|
||||
- "8000:8000"
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
DATABASE_URL: "{DATABASE_URL}"
|
||||
volumes:
|
||||
- .:/app # sync code for hot reload
|
||||
439
docs/status_definitions.md
Normal file
439
docs/status_definitions.md
Normal file
@@ -0,0 +1,439 @@
|
||||
# Membership Status Definitions & Transitions
|
||||
|
||||
This document defines all user membership statuses, their meanings, valid transitions, and automated rules.
|
||||
|
||||
## Status Overview
|
||||
|
||||
| Status | Type | Description | Member Access |
|
||||
|--------|------|-------------|---------------|
|
||||
| `pending_email` | Registration | User registered, awaiting email verification | None |
|
||||
| `pending_validation` | Registration | Email verified, awaiting event attendance | Newsletter only |
|
||||
| `pre_validated` | Registration | Attended event or referred, ready for admin validation | Newsletter only |
|
||||
| `payment_pending` | Registration | Admin validated, awaiting payment | Newsletter only |
|
||||
| `active` | Active | Payment completed, full member access | Full access |
|
||||
| `inactive` | Inactive | Membership deactivated manually | None |
|
||||
| `canceled` | Terminated | User or admin canceled membership | None |
|
||||
| `expired` | Terminated | Subscription ended without renewal | Limited (historical) |
|
||||
| `abandoned` | Terminated | Incomplete registration after reminders | None |
|
||||
|
||||
---
|
||||
|
||||
## Detailed Status Definitions
|
||||
|
||||
### 1. pending_email
|
||||
|
||||
**Definition:** User has registered but not verified their email address.
|
||||
|
||||
**How User Enters:**
|
||||
- User completes registration form (Step 1-4)
|
||||
- System creates user account with `pending_email` status
|
||||
|
||||
**Valid Transitions:**
|
||||
- → `pending_validation` (email verified)
|
||||
- → `pre_validated` (email verified + referred by member)
|
||||
- → `abandoned` (optional: 30 days without verification after reminders)
|
||||
|
||||
**Member Access:**
|
||||
- Cannot login
|
||||
- Cannot access any member features
|
||||
- Not subscribed to newsletter
|
||||
|
||||
**Reminder Schedule:**
|
||||
- Day 3: First reminder email
|
||||
- Day 7: Second reminder email
|
||||
- Day 14: Third reminder email
|
||||
- Day 30: Final reminder (optional: transition to abandoned)
|
||||
|
||||
**Admin Actions:**
|
||||
- Can manually resend verification email
|
||||
- Can manually verify email (bypass)
|
||||
- Can delete user account
|
||||
|
||||
---
|
||||
|
||||
### 2. pending_validation
|
||||
|
||||
**Definition:** Email verified, user needs to attend an event within 90 days (per LOAF policy).
|
||||
|
||||
**How User Enters:**
|
||||
- Email verification successful (from `pending_email`)
|
||||
- 90-day countdown timer starts
|
||||
|
||||
**Valid Transitions:**
|
||||
- → `pre_validated` (attended event marked by admin)
|
||||
- → `abandoned` (90 days without event attendance - per policy)
|
||||
|
||||
**Member Access:**
|
||||
- Can login to view dashboard
|
||||
- Subscribed to newsletter
|
||||
- Cannot access member-only features
|
||||
- Can view public events
|
||||
|
||||
**Reminder Schedule:**
|
||||
- Day 30: "You have 60 days remaining to attend an event"
|
||||
- Day 60: "You have 30 days remaining to attend an event"
|
||||
- Day 80: "Reminder: 10 days left to attend an event"
|
||||
- Day 85: "Final reminder: 5 days left"
|
||||
- Day 90: Transition to `abandoned`, remove from newsletter
|
||||
|
||||
**Admin Actions:**
|
||||
- Can mark event attendance (triggers transition to `pre_validated`)
|
||||
- Can manually transition to `pre_validated` (bypass event requirement)
|
||||
- Can extend deadline
|
||||
|
||||
---
|
||||
|
||||
### 3. pre_validated
|
||||
|
||||
**Definition:** User attended event or was referred, awaiting admin validation.
|
||||
|
||||
**How User Enters:**
|
||||
- Admin marked event attendance (from `pending_validation`)
|
||||
- User registered with valid member referral (skipped `pending_validation`)
|
||||
|
||||
**Valid Transitions:**
|
||||
- → `payment_pending` (admin validates application)
|
||||
- → `inactive` (admin rejects application - rare)
|
||||
|
||||
**Member Access:**
|
||||
- Can login to view dashboard
|
||||
- Subscribed to newsletter
|
||||
- Cannot access member-only features
|
||||
- Can view public events
|
||||
|
||||
**Automated Rules:**
|
||||
- None (requires admin action)
|
||||
|
||||
**Admin Actions:**
|
||||
- Review application in Validation Queue
|
||||
- Validate → transition to `payment_pending` (sends payment email)
|
||||
- Reject → transition to `inactive` (sends rejection email)
|
||||
|
||||
---
|
||||
|
||||
### 4. payment_pending
|
||||
|
||||
**Definition:** Admin validated application, user needs to complete payment.
|
||||
|
||||
**How User Enters:**
|
||||
- Admin validates application (from `pre_validated`)
|
||||
- Payment email sent with Stripe Checkout link
|
||||
|
||||
**Valid Transitions:**
|
||||
- → `active` (payment successful via Stripe webhook)
|
||||
- → `abandoned` (optional: 60 days without payment after reminders)
|
||||
|
||||
**Member Access:**
|
||||
- Can login to view dashboard
|
||||
- Subscribed to newsletter
|
||||
- Cannot access member-only features
|
||||
- Can view subscription plans page
|
||||
|
||||
**Reminder Schedule:**
|
||||
- Day 7: First payment reminder
|
||||
- Day 14: Second payment reminder
|
||||
- Day 21: Third payment reminder
|
||||
- Day 30: Fourth payment reminder
|
||||
- Day 45: Fifth payment reminder
|
||||
- Day 60: Final reminder (optional: transition to abandoned)
|
||||
|
||||
**Note:** Since admin already validated this user, consider keeping them in this status indefinitely rather than auto-abandoning.
|
||||
|
||||
**Admin Actions:**
|
||||
- Can manually activate membership (for offline payments: cash, check, bank transfer)
|
||||
- Can resend payment email
|
||||
|
||||
---
|
||||
|
||||
### 5. active
|
||||
|
||||
**Definition:** Payment completed, full membership access granted.
|
||||
|
||||
**How User Enters:**
|
||||
- Stripe payment successful (from `payment_pending`)
|
||||
- Admin manually activated (offline payment)
|
||||
|
||||
**Valid Transitions:**
|
||||
- → `expired` (subscription end date reached without renewal)
|
||||
- → `canceled` (user or admin cancels membership)
|
||||
- → `inactive` (admin manually deactivates)
|
||||
|
||||
**Member Access:**
|
||||
- Full member dashboard access
|
||||
- All member-only features
|
||||
- Event RSVP and attendance tracking
|
||||
- Member directory listing
|
||||
- Newsletter subscribed
|
||||
|
||||
**Renewal Reminder Schedule:**
|
||||
- 60 days before expiration: First renewal reminder
|
||||
- 30 days before expiration: Second renewal reminder
|
||||
- 14 days before expiration: Third renewal reminder
|
||||
- 7 days before expiration: Final renewal reminder
|
||||
- On expiration: Transition to `expired`
|
||||
|
||||
**Admin Actions:**
|
||||
- Can cancel membership → `canceled`
|
||||
- Can manually deactivate → `inactive`
|
||||
- Can extend subscription end_date
|
||||
|
||||
---
|
||||
|
||||
### 6. inactive
|
||||
|
||||
**Definition:** Membership manually deactivated by admin.
|
||||
|
||||
**How User Enters:**
|
||||
- Admin manually sets status to `inactive`
|
||||
- Used for temporary suspensions or admin rejections
|
||||
|
||||
**Valid Transitions:**
|
||||
- → `active` (admin reactivates)
|
||||
- → `payment_pending` (admin prompts for payment)
|
||||
|
||||
**Member Access:**
|
||||
- Can login but no member features
|
||||
- Not subscribed to newsletter
|
||||
- Cannot access member-only content
|
||||
|
||||
**Automated Rules:**
|
||||
- None (requires admin action to exit)
|
||||
|
||||
**Admin Actions:**
|
||||
- Reactivate membership → `active`
|
||||
- Prompt for payment → `payment_pending`
|
||||
- Delete user account
|
||||
|
||||
---
|
||||
|
||||
### 7. canceled
|
||||
|
||||
**Definition:** Membership canceled by user or admin.
|
||||
|
||||
**How User Enters:**
|
||||
- User cancels subscription via Stripe portal
|
||||
- Admin cancels membership
|
||||
- Stripe webhook: `customer.subscription.deleted`
|
||||
|
||||
**Valid Transitions:**
|
||||
- → `payment_pending` (user requests to rejoin)
|
||||
- → `active` (admin reactivates with new subscription)
|
||||
|
||||
**Member Access:**
|
||||
- Can login to view dashboard (historical data)
|
||||
- Not subscribed to newsletter
|
||||
- Cannot access current member-only features
|
||||
- Can view historical event attendance
|
||||
|
||||
**Automated Rules:**
|
||||
- Stripe webhook triggers automatic transition
|
||||
|
||||
**Admin Actions:**
|
||||
- Can invite user to rejoin → `payment_pending`
|
||||
- Can manually reactivate → `active` (if subscription still valid)
|
||||
|
||||
---
|
||||
|
||||
### 8. expired
|
||||
|
||||
**Definition:** Subscription ended without renewal.
|
||||
|
||||
**How User Enters:**
|
||||
- Subscription `end_date` reached without renewal
|
||||
- Automated check runs daily
|
||||
|
||||
**Valid Transitions:**
|
||||
- → `payment_pending` (user chooses to renew)
|
||||
- → `active` (admin manually renews/extends)
|
||||
|
||||
**Member Access:**
|
||||
- Can login to view dashboard (historical data)
|
||||
- Not subscribed to newsletter
|
||||
- Cannot access current member-only features
|
||||
- Can view historical event attendance
|
||||
- Shown renewal prompts
|
||||
|
||||
**Automated Rules:**
|
||||
- Daily check for subscriptions past `end_date` → transition to `expired`
|
||||
- Send renewal invitation email on transition
|
||||
|
||||
**Post-Expiration Reminders:**
|
||||
- Immediate: Expiration notification + renewal link
|
||||
- 7 days after: Renewal reminder
|
||||
- 30 days after: Final renewal reminder
|
||||
- 90 days after: Optional cleanup/archive
|
||||
|
||||
**Admin Actions:**
|
||||
- Manually extend subscription → `active`
|
||||
- Send renewal invitation → `payment_pending`
|
||||
|
||||
---
|
||||
|
||||
### 9. abandoned
|
||||
|
||||
**Definition:** User failed to complete registration process after multiple reminders.
|
||||
|
||||
**How User Enters:**
|
||||
- From `pending_email`: 30 days without verification (optional - after 4 reminders)
|
||||
- From `pending_validation`: 90 days without event attendance (after 4 reminders)
|
||||
- From `payment_pending`: 60 days without payment (optional - after 6 reminders)
|
||||
|
||||
**Valid Transitions:**
|
||||
- → `pending_email` (admin resets application, resends verification)
|
||||
- → `pending_validation` (admin resets, manually verifies email)
|
||||
- → `payment_pending` (admin resets, bypasses requirements)
|
||||
|
||||
**Member Access:**
|
||||
- Cannot login
|
||||
- Not subscribed to newsletter
|
||||
- All access revoked
|
||||
|
||||
**Automated Rules:**
|
||||
- Send "incomplete application" notification email on transition
|
||||
- Optional: Purge from database after 180 days (configurable)
|
||||
|
||||
**Admin Actions:**
|
||||
- Can reset application → return to appropriate pending state
|
||||
- Can delete user account
|
||||
- Can view abandoned applications in admin dashboard
|
||||
|
||||
---
|
||||
|
||||
## State Transition Diagram
|
||||
|
||||
```
|
||||
┌──────────────┐
|
||||
│ Registration │
|
||||
│ (Guest) │
|
||||
└──────────────┘
|
||||
│
|
||||
↓
|
||||
┌───────────────┐ (30 days) ┌──────────┐
|
||||
│ pending_email │──────────────────→│abandoned │
|
||||
└───────────────┘ └──────────┘
|
||||
│ ↑
|
||||
(verify email) │
|
||||
│ │
|
||||
↓ │
|
||||
┌────────────────────┐ (90 days) │
|
||||
│pending_validation │───────────────────┘
|
||||
│ (or pre_validated) │
|
||||
└────────────────────┘
|
||||
│
|
||||
(event/admin)
|
||||
│
|
||||
↓
|
||||
┌────────────────┐
|
||||
│ pre_validated │
|
||||
└────────────────┘
|
||||
│
|
||||
(admin validates)
|
||||
│
|
||||
↓
|
||||
┌─────────────────┐ (60 days) ┌──────────┐
|
||||
│payment_pending │──────────────────→│abandoned │
|
||||
└─────────────────┘ └──────────┘
|
||||
│
|
||||
(payment)
|
||||
│
|
||||
↓
|
||||
┌─────────┐
|
||||
│ active │←────────────┐
|
||||
└─────────┘ │
|
||||
│ │
|
||||
├────(expires)────→┌─────────┐
|
||||
│ │expired │
|
||||
├────(cancels)────→├─────────┤
|
||||
│ │canceled │
|
||||
└──(deactivate)───→├─────────┤
|
||||
│inactive │
|
||||
└─────────┘
|
||||
│
|
||||
(renew/reactivate)
|
||||
│
|
||||
└──────────┘
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Email Notification Summary
|
||||
|
||||
| Trigger | Emails Sent |
|
||||
|---------|-------------|
|
||||
| Registration complete | Verification email (immediate) |
|
||||
| pending_email day 3, 7, 14, 30 | Verification reminders |
|
||||
| Email verified | Welcome + event attendance instructions |
|
||||
| pending_validation day 30, 60, 80, 85 | Event attendance reminders |
|
||||
| Admin validates | Payment instructions |
|
||||
| payment_pending day 7, 14, 21, 30, 45, 60 | Payment reminders |
|
||||
| Payment successful | Membership activation confirmation |
|
||||
| active: 60, 30, 14, 7 days before expiry | Renewal reminders |
|
||||
| Subscription expires | Expiration notice + renewal link |
|
||||
| expired: 7, 30, 90 days after | Post-expiration renewal reminders |
|
||||
| Status → abandoned | Incomplete application notice |
|
||||
| Admin cancels | Cancellation confirmation |
|
||||
|
||||
---
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
### Configuration Options
|
||||
|
||||
All timeout periods should be configurable via environment variables:
|
||||
|
||||
```bash
|
||||
# Abandonment timeouts (in days, 0 = never auto-abandon)
|
||||
EMAIL_VERIFICATION_TIMEOUT=30
|
||||
EVENT_ATTENDANCE_TIMEOUT=90
|
||||
PAYMENT_TIMEOUT=0 # Don't auto-abandon payment_pending
|
||||
|
||||
# Reminder schedules (comma-separated days)
|
||||
EMAIL_REMINDERS=3,7,14,30
|
||||
EVENT_REMINDERS=30,60,80,85
|
||||
PAYMENT_REMINDERS=7,14,21,30,45,60
|
||||
RENEWAL_REMINDERS=60,30,14,7
|
||||
EXPIRED_REMINDERS=7,30,90
|
||||
```
|
||||
|
||||
### Background Jobs Required
|
||||
|
||||
1. **Daily Status Check** (runs at 00:00 UTC)
|
||||
- Check for expired subscriptions → `expired`
|
||||
- Check for abandonment timeouts (if enabled)
|
||||
|
||||
2. **Hourly Reminder Check** (runs every hour)
|
||||
- Calculate days since status change
|
||||
- Send appropriate reminder emails based on schedule
|
||||
|
||||
### Database Indexes
|
||||
|
||||
```sql
|
||||
CREATE INDEX idx_users_status ON users(status);
|
||||
CREATE INDEX idx_users_created_at ON users(created_at);
|
||||
CREATE INDEX idx_users_updated_at ON users(updated_at);
|
||||
CREATE INDEX idx_subscriptions_end_date ON subscriptions(end_date) WHERE status = 'active';
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Testing Checklist
|
||||
|
||||
- [ ] Reminder emails sent on correct schedule
|
||||
- [ ] Abandonment timeouts respect configuration
|
||||
- [ ] Manual status transitions work correctly
|
||||
- [ ] Role updates on status change
|
||||
- [ ] Newsletter subscription/unsubscription on status change
|
||||
- [ ] Email notifications use correct templates
|
||||
- [ ] Stripe webhook integration for cancellations/expirations
|
||||
- [ ] Admin can bypass requirements and manually transition
|
||||
- [ ] Users can complete registration even after reminders stop
|
||||
|
||||
---
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
1. **Audit Logging**: Create `user_status_log` table to track all transitions
|
||||
2. **Re-engagement Campaigns**: Target abandoned users with special offers
|
||||
3. **Flexible Timeout Periods**: Per-user timeout overrides for special cases
|
||||
4. **A/B Testing**: Test different reminder schedules for better completion rates
|
||||
5. **SMS Reminders**: Optional SMS for critical reminders (payment due, expiration)
|
||||
188
email_service.py
188
email_service.py
@@ -376,3 +376,191 @@ async def send_admin_password_reset_email(
|
||||
"""
|
||||
|
||||
return await send_email(to_email, subject, html_content)
|
||||
|
||||
|
||||
async def send_invitation_email(
    to_email: str,
    inviter_name: str,
    invitation_url: str,
    role: str
):
    """Send an account-invitation email.

    Args:
        to_email: Recipient address.
        inviter_name: Display name of the person who sent the invitation.
        invitation_url: One-time signup link embedded in the email body.
        role: Role being granted; selects the descriptive blurb and is shown
            capitalized in the subject and body.

    Returns:
        The result of ``send_email`` (defined elsewhere in this module).
    """
    subject = f"You've Been Invited to Join LOAF - {role.capitalize()} Access"

    # Human-readable blurb per role; unknown roles fall back to a generic one.
    role_descriptions = {
        "member": "full member access to our community",
        "admin": "administrative access to manage the platform",
        "superadmin": "full administrative access with system-wide permissions"
    }

    role_description = role_descriptions.get(role.lower(), "access to our platform")

    # Inline-styled HTML template (doubled braces escape CSS from the f-string).
    html_content = f"""
    <!DOCTYPE html>
    <html>
    <head>
        <style>
            body {{ font-family: 'Nunito Sans', Arial, sans-serif; line-height: 1.6; color: #422268; }}
            .container {{ max-width: 600px; margin: 0 auto; padding: 20px; }}
            .header {{ background: linear-gradient(135deg, #644c9f 0%, #48286e 100%); padding: 30px; text-align: center; border-radius: 10px 10px 0 0; }}
            .header h1 {{ color: white; margin: 0; font-family: 'Inter', sans-serif; }}
            .content {{ background: #FFFFFF; padding: 30px; border-radius: 0 0 10px 10px; }}
            .button {{ display: inline-block; background: #ff9e77; color: #FFFFFF; padding: 15px 40px; text-decoration: none; border-radius: 50px; font-weight: 600; margin: 20px 0; }}
            .button:hover {{ background: #e88d66; }}
            .info-box {{ background: #f1eef9; padding: 20px; border-radius: 8px; margin: 20px 0; border: 2px solid #ddd8eb; }}
            .note {{ background: #FFEBEE; border-left: 4px solid #ff9e77; padding: 15px; margin: 20px 0; }}
        </style>
    </head>
    <body>
        <div class="container">
            <div class="header">
                <h1>🎉 You're Invited!</h1>
            </div>
            <div class="content">
                <p><strong>{inviter_name}</strong> has invited you to join the LOAF community with <strong>{role_description}</strong>.</p>

                <div class="info-box">
                    <p style="margin: 0;"><strong>Your Role:</strong> {role.capitalize()}</p>
                    <p style="margin: 10px 0 0 0;"><strong>Invited By:</strong> {inviter_name}</p>
                </div>

                <p>Click the button below to accept your invitation and create your account:</p>

                <p style="text-align: center;">
                    <a href="{invitation_url}" class="button">Accept Invitation</a>
                </p>

                <div class="note">
                    <p style="margin: 0; font-size: 14px;"><strong>⏰ This invitation expires in 7 days.</strong></p>
                    <p style="margin: 5px 0 0 0; font-size: 14px;">If you didn't expect this invitation, you can safely ignore this email.</p>
                </div>

                <p style="margin-top: 20px; color: #664fa3; font-size: 14px;">
                    Or copy and paste this link into your browser:<br>
                    <span style="word-break: break-all;">{invitation_url}</span>
                </p>

                <p style="margin-top: 30px; padding-top: 20px; border-top: 1px solid #ddd8eb; color: #664fa3; font-size: 14px;">
                    Questions? Contact us at support@loaf.org
                </p>
            </div>
        </div>
    </body>
    </html>
    """

    return await send_email(to_email, subject, html_content)
|
||||
|
||||
|
||||
async def send_donation_thank_you_email(email: str, first_name: str, amount_cents: int):
    """Send a thank-you email after a successful donation.

    Args:
        email: Donor's email address.
        first_name: Donor's first name, used in the greeting.
        amount_cents: Donation amount in cents; rendered as dollars.

    Returns:
        The result of ``send_email`` (defined elsewhere in this module).
    """
    subject = "Thank You for Your Generous Donation!"
    # Convert cents to a dollar string, e.g. 2500 -> "$25.00".
    amount = f"${amount_cents / 100:.2f}"

    # Inline-styled HTML template (doubled braces escape CSS from the f-string).
    html_content = f"""
    <!DOCTYPE html>
    <html>
    <head>
        <style>
            body {{ font-family: 'Nunito Sans', Arial, sans-serif; line-height: 1.6; color: #422268; }}
            .container {{ max-width: 600px; margin: 0 auto; padding: 20px; }}
            .header {{ background: linear-gradient(135deg, #644c9f 0%, #48286e 100%); padding: 30px; text-align: center; border-radius: 10px 10px 0 0; }}
            .header h1 {{ color: white; margin: 0; font-family: 'Inter', sans-serif; font-size: 32px; }}
            .content {{ background: #FFFFFF; padding: 30px; border-radius: 0 0 10px 10px; }}
            .amount-box {{ background: #f1eef9; padding: 20px; border-radius: 8px; margin: 20px 0; border: 2px solid #ddd8eb; text-align: center; }}
            .amount {{ color: #422268; font-size: 36px; font-weight: bold; margin: 10px 0; }}
            .impact-box {{ background: #f9f5ff; border-left: 4px solid #81B29A; padding: 20px; margin: 24px 0; border-radius: 8px; }}
        </style>
    </head>
    <body>
        <div class="container">
            <div class="header">
                <h1>💜 Thank You!</h1>
            </div>
            <div class="content">
                <p>Dear {first_name},</p>

                <p>Thank you for your generous donation to LOAF!</p>

                <div class="amount-box">
                    <p style="margin: 0; color: #664fa3; font-size: 16px;">Donation Amount</p>
                    <div class="amount">{amount}</div>
                </div>

                <div class="impact-box">
                    <p style="color: #422268; font-size: 16px; margin: 0;">
                        Your support helps us continue our mission to build and strengthen the LGBTQ+ community.
                    </p>
                </div>

                <p>Your donation is tax-deductible to the extent allowed by law. Please keep this email for your records.</p>

                <p>We are deeply grateful for your commitment to our community and your belief in our work.</p>

                <p style="margin-top: 30px;">
                    With gratitude,<br/>
                    <strong style="color: #422268;">The LOAF Team</strong>
                </p>

                <p style="margin-top: 30px; padding-top: 20px; border-top: 1px solid #ddd8eb; color: #664fa3; font-size: 14px;">
                    Questions about your donation? Contact us at support@loaf.org
                </p>
            </div>
        </div>
    </body>
    </html>
    """

    return await send_email(email, subject, html_content)
|
||||
|
||||
|
||||
async def send_rejection_email(email: str, first_name: str, reason: str):
    """Send a membership-application rejection notification.

    Args:
        email: Applicant's email address.
        first_name: Applicant's first name, used in the greeting.
        reason: Admin-supplied rejection reason; rendered verbatim in the
            body (whitespace preserved via ``white-space: pre-wrap``).

    Returns:
        The result of ``send_email`` (defined elsewhere in this module).
    """
    subject = "LOAF Membership Application Update"

    # Inline-styled HTML template (doubled braces escape CSS from the f-string).
    html_content = f"""
    <!DOCTYPE html>
    <html>
    <head>
        <style>
            body {{ font-family: 'Nunito Sans', Arial, sans-serif; line-height: 1.6; color: #422268; }}
            .container {{ max-width: 600px; margin: 0 auto; padding: 20px; }}
            .header {{ background: linear-gradient(135deg, #644c9f 0%, #48286e 100%); padding: 30px; text-align: center; border-radius: 10px 10px 0 0; }}
            .header h1 {{ color: white; margin: 0; font-family: 'Inter', sans-serif; }}
            .content {{ background: #FFFFFF; padding: 30px; border-radius: 0 0 10px 10px; }}
            .reason-box {{ background: #f9f5ff; border-left: 4px solid #ff9e77; padding: 20px; margin: 24px 0; border-radius: 8px; }}
            .reason-box p {{ color: #422268; font-size: 14px; margin: 0; white-space: pre-wrap; }}
        </style>
    </head>
    <body>
        <div class="container">
            <div class="header">
                <h1>Membership Application Update</h1>
            </div>
            <div class="content">
                <p>Dear {first_name},</p>

                <p>Thank you for your interest in joining LOAF. After careful review, we are unable to approve your membership application at this time.</p>

                <div class="reason-box">
                    <p><strong>Reason:</strong></p>
                    <p>{reason}</p>
                </div>

                <p>If you have questions or would like to discuss this decision, please don't hesitate to contact us at support@loaf.org.</p>

                <p style="margin-top: 30px;">
                    Warm regards,<br/>
                    <strong style="color: #422268;">The LOAF Team</strong>
                </p>

                <p style="margin-top: 30px; padding-top: 20px; border-top: 1px solid #ddd8eb; color: #664fa3; font-size: 14px;">
                    Questions? Contact us at support@loaf.org
                </p>
            </div>
        </div>
    </body>
    </html>
    """

    return await send_email(email, subject, html_content)
|
||||
|
||||
122
encryption_service.py
Normal file
122
encryption_service.py
Normal file
@@ -0,0 +1,122 @@
|
||||
"""
|
||||
Encryption service for sensitive settings stored in database.
|
||||
|
||||
Uses Fernet symmetric encryption (AES-128 in CBC mode with HMAC authentication).
|
||||
The encryption key is derived from a master secret stored in .env.
|
||||
"""
|
||||
|
||||
import os
|
||||
import base64
|
||||
from cryptography.fernet import Fernet
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
|
||||
|
||||
class EncryptionService:
    """Service for encrypting and decrypting sensitive configuration values.

    Values are encrypted with Fernet (AES-128 in CBC mode with HMAC
    authentication). The Fernet key is derived from the
    SETTINGS_ENCRYPTION_KEY environment variable via PBKDF2-HMAC-SHA256,
    so the raw master secret is never used as the key directly.
    """

    def __init__(self):
        # Get master encryption key from environment.
        # This should be a long, random string (e.g., 64 characters).
        # Generate one with: python -c "import secrets; print(secrets.token_urlsafe(64))"
        self.master_secret = os.environ.get('SETTINGS_ENCRYPTION_KEY')

        if not self.master_secret:
            raise ValueError(
                "SETTINGS_ENCRYPTION_KEY environment variable not set. "
                "Generate one with: python -c \"import secrets; print(secrets.token_urlsafe(64))\""
            )

        # Derive encryption key from master secret using PBKDF2HMAC.
        # NOTE: the salt and iteration count are part of the key derivation —
        # changing either makes previously encrypted data undecryptable.
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=b'systemsettings',  # Fixed salt (OK for key derivation from strong secret)
            iterations=100000,
            backend=default_backend()
        )
        key = base64.urlsafe_b64encode(kdf.derive(self.master_secret.encode()))
        self.cipher = Fernet(key)

    def encrypt(self, plaintext: str) -> str:
        """
        Encrypt a plaintext string.

        Args:
            plaintext: The string to encrypt

        Returns:
            Base64-encoded encrypted string ("" when plaintext is empty/falsy)
        """
        if not plaintext:
            return ""

        encrypted_bytes = self.cipher.encrypt(plaintext.encode())
        return encrypted_bytes.decode('utf-8')

    def decrypt(self, encrypted: str) -> str:
        """
        Decrypt an encrypted string.

        Args:
            encrypted: The base64-encoded encrypted string

        Returns:
            Decrypted plaintext string ("" when input is empty/falsy)

        Raises:
            cryptography.fernet.InvalidToken: If decryption fails (wrong key or corrupted data)
        """
        if not encrypted:
            return ""

        decrypted_bytes = self.cipher.decrypt(encrypted.encode())
        return decrypted_bytes.decode('utf-8')

    def is_encrypted(self, value: str) -> bool:
        """
        Check if a value appears to be encrypted (starts with Fernet token format).

        This is a heuristic check - not 100% reliable but useful for validation.

        Args:
            value: String to check

        Returns:
            True if value looks like a Fernet token
        """
        if not value:
            return False

        # Fernet tokens are base64-encoded and start with version byte (gAAAAA...)
        # They're always > 60 characters.
        # Fixed: the original bare `except:` swallowed every exception
        # (including KeyboardInterrupt/SystemExit). Only the "wrong type
        # passed in" failures should fall through to False.
        try:
            return len(value) > 60 and value.startswith('gAAAAA')
        except (TypeError, AttributeError):
            return False
|
||||
|
||||
|
||||
# Module-level singleton, created eagerly at import time so a missing
# SETTINGS_ENCRYPTION_KEY is noticed immediately rather than at first use.
encryption_service = None
try:
    encryption_service = EncryptionService()
except ValueError as err:
    print(f"WARNING: {err}")
    print("Encryption service will not be available.")
|
||||
|
||||
|
||||
def get_encryption_service() -> EncryptionService:
    """Return the module-level EncryptionService singleton.

    Raises:
        ValueError: If encryption service is not initialized (missing SETTINGS_ENCRYPTION_KEY)
    """
    if encryption_service is not None:
        return encryption_service
    raise ValueError(
        "Encryption service not initialized. Set SETTINGS_ENCRYPTION_KEY environment variable."
    )
|
||||
44
fix_all_schema_mismatches.sh
Normal file
44
fix_all_schema_mismatches.sh
Normal file
@@ -0,0 +1,44 @@
|
||||
#!/bin/bash
# Fix all schema mismatches between models.py and database
# Run this on your server
#
# NOTE(review): `alembic upgrade head` applies ALL pending migrations, not
# only 003 — the step labels below describe the expected state of the
# target server, where 001/002 are assumed already applied.

set -e # Exit on error

echo "============================================================"
echo "Schema Mismatch Fix Script"
echo "============================================================"
echo ""

# Navigate to backend directory
# (relies on the script being invoked by its real path; $0 via a symlink
# would cd to the symlink's directory instead)
cd "$(dirname "$0")"

echo "Step 1: Check current Alembic status..."
python3 -m alembic current

echo ""
echo "Step 2: Apply migration 003 (user_invitations fields)..."
python3 -m alembic upgrade head

echo ""
echo "Step 3: Verify migration was applied..."
python3 -m alembic current

echo ""
# Restart the app so SQLAlchemy models pick up the new columns.
echo "Step 4: Restart PM2 backend..."
pm2 restart membership-backend

echo ""
echo "============================================================"
echo "✅ Schema fixes applied!"
echo "============================================================"
echo ""
echo "Migrations applied:"
echo " - 001_initial_baseline"
echo " - 002_add_missing_user_fields (users table)"
echo " - 003_add_user_invitation_fields (user_invitations table)"
echo ""
echo "Please test:"
echo " 1. Login to admin dashboard"
echo " 2. Navigate to user invitations page"
echo " 3. Verify no more schema errors"
echo ""
||||
98
inspect_current_state.py
Normal file
98
inspect_current_state.py
Normal file
@@ -0,0 +1,98 @@
|
||||
#!/usr/bin/env python3
"""
Inspect current database state to understand the duplicate key issue
"""
import os
import sys
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from dotenv import load_dotenv

load_dotenv()

# Read the connection string from the environment / .env instead of
# hard-coding credentials in source control. (An earlier revision embedded
# the dev-server password directly here; that credential should be treated
# as leaked and rotated.)
DATABASE_URL = os.getenv("DATABASE_URL")
if not DATABASE_URL:
    print("Error: DATABASE_URL environment variable not set")
    sys.exit(1)

engine = create_engine(DATABASE_URL)
Session = sessionmaker(bind=engine)
db = Session()

try:
    print("=" * 80)
    print("DATABASE STATE INSPECTION")
    print("=" * 80)

    # Overall row counts first, for a sense of scale.
    perm_count = db.execute(text("SELECT COUNT(*) FROM permissions")).scalar()
    print(f"\nTotal permissions: {perm_count}")

    rp_count = db.execute(text("SELECT COUNT(*) FROM role_permissions")).scalar()
    print(f"Total role_permissions: {rp_count}")

    if rp_count > 0:
        print("\n" + "=" * 80)
        print("CURRENT ROLE-PERMISSION MAPPINGS (grouped by role)")
        print("=" * 80)

        # LEFT JOIN: role_name is NULL for rows not yet migrated to role_id.
        result = db.execute(text("""
            SELECT
                rp.role,
                r.name as role_name,
                COUNT(*) as permission_count
            FROM role_permissions rp
            LEFT JOIN roles r ON rp.role_id = r.id
            GROUP BY rp.role, r.name
            ORDER BY rp.role
        """))

        for row in result.fetchall():
            # str() guards the width format specs against NULL columns from
            # the LEFT JOIN: format specs like :20 raise TypeError on None.
            print(f" {str(row[0]):15} ({str(row[1]):20}): {row[2]} permissions")

    print("\n" + "=" * 80)
    print("CHECKING FOR DUPLICATES")
    print("=" * 80)

    # Check if there are actual duplicates
    result = db.execute(text("""
        SELECT role, permission_id, COUNT(*) as count
        FROM role_permissions
        GROUP BY role, permission_id
        HAVING COUNT(*) > 1
    """))

    duplicates = result.fetchall()
    if duplicates:
        print(f"\n⚠️ Found {len(duplicates)} duplicate (role, permission_id) pairs:")
        for dup in duplicates[:10]: # Show first 10
            print(f" role={dup[0]}, permission_id={dup[1]}, count={dup[2]}")
    else:
        print("\n✓ No duplicate (role, permission_id) pairs found")

    print("\n" + "=" * 80)
    print("CHECKING SPECIFIC ADMIN PERMISSIONS")
    print("=" * 80)

    # Check how many admin permissions exist
    result = db.execute(text("""
        SELECT COUNT(*)
        FROM role_permissions
        WHERE role = 'admin'
    """))
    admin_count = result.scalar()
    print(f"\nAdmin has {admin_count} permission assignments")

    # Check if the specific permission mentioned in error exists
    result = db.execute(text("""
        SELECT rp.id, rp.role, p.code, p.name
        FROM role_permissions rp
        JOIN permissions p ON rp.permission_id = p.id
        WHERE rp.role = 'admin'
        ORDER BY p.code
        LIMIT 20
    """))

    print("\nFirst 20 admin permissions:")
    for row in result.fetchall():
        print(f" {row[1]:10} -> {row[2]:30} ({row[3]})")
finally:
    # Always release the session, even if a query fails mid-run
    # (the original leaked the connection on any exception).
    db.close()
|
||||
145
migrate_role_permissions_to_dynamic_roles.py
Normal file
145
migrate_role_permissions_to_dynamic_roles.py
Normal file
@@ -0,0 +1,145 @@
|
||||
"""
|
||||
Role Permissions Migration Script (Phase 3)
|
||||
|
||||
This script migrates role_permissions from the legacy role enum to the new dynamic role system.
|
||||
For each role_permission, it maps the current role enum value to the corresponding role_id.
|
||||
|
||||
Usage:
|
||||
python migrate_role_permissions_to_dynamic_roles.py
|
||||
|
||||
Environment Variables:
|
||||
DATABASE_URL - PostgreSQL connection string
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from database import Base
|
||||
from models import RolePermission, Role, UserRole
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Pull configuration from .env into the process environment.
load_dotenv()

# Build the engine/session factory from the configured connection string;
# refuse to run without one.
DATABASE_URL = os.getenv("DATABASE_URL")
if DATABASE_URL is None or DATABASE_URL == "":
    print("Error: DATABASE_URL environment variable not set")
    sys.exit(1)

engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(bind=engine, autocommit=False, autoflush=False)
|
||||
|
||||
|
||||
def migrate_role_permissions() -> None:
    """Migrate role_permissions from enum role to role_id.

    For each RolePermission row, looks up the Role whose ``code`` equals the
    legacy enum value and stores its id in ``role_id``. Interactive: if some
    rows already carry a role_id, the operator is asked whether to re-migrate
    them. All changes are committed in a single transaction; on any error the
    session is rolled back and the exception re-raised.
    """
    db = SessionLocal()

    try:
        print("🚀 Starting role_permissions migration (Phase 3)...")
        print("="*60)

        # Step 1: Load all roles into a map keyed by code for O(1) lookups.
        print("\n📋 Loading roles from database...")
        roles = db.query(Role).all()
        role_map = {role.code: role for role in roles}

        print(f"✓ Loaded {len(roles)} roles:")
        for role in roles:
            print(f" • {role.name} ({role.code}) - ID: {role.id}")

        # Step 2: Get all role_permissions
        print("\n🔐 Loading role_permissions...")
        role_permissions = db.query(RolePermission).all()
        print(f"✓ Found {len(role_permissions)} role_permission records to migrate")

        if not role_permissions:
            print("\n✅ No role_permissions to migrate!")
            return

        # Step 3: Check if any role_permissions already have role_id set.
        # Answering anything other than "yes" filters those rows out so the
        # run is idempotent for already-migrated data.
        perms_with_role_id = [rp for rp in role_permissions if rp.role_id is not None]
        if perms_with_role_id:
            print(f"\n⚠️ Warning: {len(perms_with_role_id)} role_permissions already have role_id set")
            response = input("Do you want to re-migrate these records? (yes/no): ")
            if response.lower() != 'yes':
                print("Skipping role_permissions that already have role_id set...")
                role_permissions = [rp for rp in role_permissions if rp.role_id is None]
                print(f"Will migrate {len(role_permissions)} role_permissions without role_id")

        if not role_permissions:
            print("\n✅ No role_permissions to migrate!")
            return

        # Step 4: Migrate role_permissions
        print(f"\n🔄 Migrating {len(role_permissions)} role_permission records...")

        # Per-role counters for the summary below. `finance` is intentionally
        # absent here; the .get() fallback in the loop tolerates any enum value.
        migration_stats = {
            UserRole.guest: 0,
            UserRole.member: 0,
            UserRole.admin: 0,
            UserRole.superadmin: 0
        }

        for rp in role_permissions:
            # Get the enum role code (e.g., "guest", "member", "admin", "superadmin").
            # NOTE(review): assumes the legacy enum column is never NULL — a
            # NULL rp.role would raise AttributeError here; confirm on real data.
            role_code = rp.role.value

            # Find the matching role in the roles table
            if role_code not in role_map:
                print(f" ⚠️ Warning: No matching role found for '{role_code}' (permission_id: {rp.permission_id})")
                continue

            # Set the role_id
            rp.role_id = role_map[role_code].id
            migration_stats[rp.role] = migration_stats.get(rp.role, 0) + 1

        # Commit all changes at once (single transaction)
        db.commit()
        print(f"✓ Migrated {len(role_permissions)} role_permission records")

        # Step 5: Display migration summary
        print("\n" + "="*60)
        print("📊 Migration Summary:")
        print("="*60)
        print("\nRole permissions migrated by role:")
        for role_enum, count in migration_stats.items():
            if count > 0:
                print(f" • {role_enum.value}: {count} permissions")

        # Step 6: Verify migration by re-querying the table
        print("\n🔍 Verifying migration...")
        perms_without_role_id = db.query(RolePermission).filter(RolePermission.role_id == None).count()
        perms_with_role_id = db.query(RolePermission).filter(RolePermission.role_id != None).count()

        print(f" • Role permissions with role_id: {perms_with_role_id}")
        print(f" • Role permissions without role_id: {perms_without_role_id}")

        if perms_without_role_id > 0:
            print(f"\n⚠️ Warning: {perms_without_role_id} role_permissions still don't have role_id set!")
        else:
            print("\n✅ All role_permissions successfully migrated!")

        print("\n" + "="*60)
        print("✅ Role permissions migration completed successfully!")
        print("="*60)

        print("\n📝 Next Steps:")
        print(" 1. Update auth.py to use dynamic roles")
        print(" 2. Update server.py role checks")
        print(" 3. Verify system still works with new roles")
        print(" 4. In Phase 4, remove legacy enum columns")

    except Exception as e:
        # Any failure discards the whole batch; nothing is partially applied.
        db.rollback()
        print(f"\n❌ Error migrating role_permissions: {str(e)}")
        import traceback
        traceback.print_exc()
        raise
    finally:
        db.close()
|
||||
|
||||
|
||||
# Script entry point: run the migration only when executed directly.
if __name__ == "__main__":
    migrate_role_permissions()
|
||||
141
migrate_users_to_dynamic_roles.py
Normal file
141
migrate_users_to_dynamic_roles.py
Normal file
@@ -0,0 +1,141 @@
|
||||
"""
|
||||
User Role Migration Script (Phase 3)
|
||||
|
||||
This script migrates existing users from the legacy role enum to the new dynamic role system.
|
||||
For each user, it maps their current role enum value to the corresponding role_id in the roles table.
|
||||
|
||||
Usage:
|
||||
python migrate_users_to_dynamic_roles.py
|
||||
|
||||
Environment Variables:
|
||||
DATABASE_URL - PostgreSQL connection string
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from database import Base
|
||||
from models import User, Role, UserRole
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Bring .env values into the process environment before reading them.
load_dotenv()

# A connection string is mandatory; bail out early with a clear message.
DATABASE_URL = os.getenv("DATABASE_URL")
if DATABASE_URL is None or DATABASE_URL == "":
    print("Error: DATABASE_URL environment variable not set")
    sys.exit(1)

engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(bind=engine, autocommit=False, autoflush=False)
|
||||
|
||||
|
||||
def migrate_users() -> None:
    """Migrate users from enum role to role_id.

    For each User row, looks up the Role whose ``code`` equals the legacy
    enum value and stores its id in ``role_id``. Interactive: if some users
    already carry a role_id, the operator is asked whether to re-migrate
    them. Commits once at the end; rolls back and re-raises on any error.
    """
    db = SessionLocal()

    try:
        print("🚀 Starting user role migration (Phase 3)...")
        print("="*60)

        # Step 1: Load all roles into a map keyed by code for O(1) lookups.
        print("\n📋 Loading roles from database...")
        roles = db.query(Role).all()
        role_map = {role.code: role for role in roles}

        print(f"✓ Loaded {len(roles)} roles:")
        for role in roles:
            print(f" • {role.name} ({role.code}) - ID: {role.id}")

        # Step 2: Get all users
        print("\n👥 Loading users...")
        users = db.query(User).all()
        print(f"✓ Found {len(users)} users to migrate")

        # Step 3: Check if any users already have role_id set; declining the
        # prompt filters them out so the run is idempotent.
        users_with_role_id = [u for u in users if u.role_id is not None]
        if users_with_role_id:
            print(f"\n⚠️ Warning: {len(users_with_role_id)} users already have role_id set")
            response = input("Do you want to re-migrate these users? (yes/no): ")
            if response.lower() != 'yes':
                print("Skipping users that already have role_id set...")
                users = [u for u in users if u.role_id is None]
                print(f"Will migrate {len(users)} users without role_id")

        if not users:
            print("\n✅ No users to migrate!")
            return

        # Step 4: Migrate users
        print(f"\n🔄 Migrating {len(users)} users...")

        # Per-role counters for the summary below. `finance` is intentionally
        # absent here; the .get() fallback in the loop tolerates any enum value.
        migration_stats = {
            UserRole.guest: 0,
            UserRole.member: 0,
            UserRole.admin: 0,
            UserRole.superadmin: 0
        }

        for user in users:
            # Get the enum role code (e.g., "guest", "member", "admin", "superadmin").
            # NOTE(review): assumes the legacy enum column is never NULL — a
            # NULL user.role would raise AttributeError here; confirm on real data.
            role_code = user.role.value

            # Find the matching role in the roles table
            if role_code not in role_map:
                print(f" ⚠️ Warning: No matching role found for '{role_code}' (user: {user.email})")
                continue

            # Set the role_id
            user.role_id = role_map[role_code].id
            migration_stats[user.role] = migration_stats.get(user.role, 0) + 1

        # Commit all changes at once (single transaction)
        db.commit()
        print(f"✓ Migrated {len(users)} users")

        # Step 5: Display migration summary
        print("\n" + "="*60)
        print("📊 Migration Summary:")
        print("="*60)
        print("\nUsers migrated by role:")
        for role_enum, count in migration_stats.items():
            if count > 0:
                print(f" • {role_enum.value}: {count} users")

        # Step 6: Verify migration by re-querying the table
        print("\n🔍 Verifying migration...")
        users_without_role_id = db.query(User).filter(User.role_id == None).count()
        users_with_role_id = db.query(User).filter(User.role_id != None).count()

        print(f" • Users with role_id: {users_with_role_id}")
        print(f" • Users without role_id: {users_without_role_id}")

        if users_without_role_id > 0:
            print(f"\n⚠️ Warning: {users_without_role_id} users still don't have role_id set!")
        else:
            print("\n✅ All users successfully migrated!")

        print("\n" + "="*60)
        print("✅ User migration completed successfully!")
        print("="*60)

        print("\n📝 Next Steps:")
        print(" 1. Migrate role_permissions table")
        print(" 2. Update auth.py to use dynamic roles")
        print(" 3. Update server.py role checks")
        print(" 4. Verify system still works with new roles")

    except Exception as e:
        # Any failure discards the whole batch; nothing is partially applied.
        db.rollback()
        print(f"\n❌ Error migrating users: {str(e)}")
        import traceback
        traceback.print_exc()
        raise
    finally:
        db.close()
|
||||
|
||||
|
||||
# Script entry point: run the migration only when executed directly.
if __name__ == "__main__":
    migrate_users()
|
||||
693
migrations/000_initial_schema.sql
Normal file
693
migrations/000_initial_schema.sql
Normal file
@@ -0,0 +1,693 @@
|
||||
-- ============================================================================
-- Migration 000: Initial Database Schema
-- ============================================================================
-- Description: Creates all base tables, enums, and indexes for the LOAF
--              membership platform. This migration should be run first on
--              a fresh database.
-- Date: 2024-12-18
-- Author: LOAF Development Team
-- ============================================================================

BEGIN;

-- ============================================================================
-- SECTION 1: Create ENUM Types
-- ============================================================================
-- Committed in its own transaction so the table sections below can reference
-- these types. Enum labels mirror the Python enums in models.py; keep the
-- two in sync when adding values.

-- User status enum
CREATE TYPE userstatus AS ENUM (
    'pending_email',
    'pending_validation',
    'pre_validated',
    'payment_pending',
    'active',
    'inactive',
    'canceled',
    'expired',
    'abandoned',
    'rejected'
);

-- User role enum (legacy; being superseded by the dynamic roles table)
CREATE TYPE userrole AS ENUM (
    'guest',
    'member',
    'admin',
    'finance',
    'superadmin'
);

-- RSVP status enum
CREATE TYPE rsvpstatus AS ENUM (
    'yes',
    'no',
    'maybe'
);

-- Subscription status enum
CREATE TYPE subscriptionstatus AS ENUM (
    'active',
    'cancelled',
    'expired'
);

-- Donation type enum
CREATE TYPE donationtype AS ENUM (
    'member',
    'public'
);

-- Donation status enum
CREATE TYPE donationstatus AS ENUM (
    'pending',
    'completed',
    'failed'
);

-- Invitation status enum
CREATE TYPE invitationstatus AS ENUM (
    'pending',
    'accepted',
    'expired',
    'revoked'
);

-- Import job status enum
CREATE TYPE importjobstatus AS ENUM (
    'processing',
    'completed',
    'failed',
    'partial',
    'validating',
    'preview_ready',
    'rolled_back'
);

COMMIT;

-- Display progress
SELECT 'Step 1/8 completed: ENUM types created' AS progress;
|
||||
|
||||
BEGIN;

-- ============================================================================
-- SECTION 2: Create Core Tables
-- ============================================================================
-- Note the circular dependency between import_jobs and users: import_jobs is
-- created first with bare UUID columns; the users table can then reference
-- import_jobs(id) directly. The reverse FKs on import_jobs are added later.

-- Import Jobs table (must be created before users due to FK reference)
CREATE TABLE IF NOT EXISTS import_jobs (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    filename VARCHAR NOT NULL,
    status importjobstatus NOT NULL DEFAULT 'processing',
    total_rows INTEGER DEFAULT 0,
    processed_rows INTEGER DEFAULT 0,
    success_count INTEGER DEFAULT 0,
    error_count INTEGER DEFAULT 0,
    error_log JSONB DEFAULT '[]'::jsonb,

    -- WordPress import enhancements
    field_mapping JSONB DEFAULT '{}'::jsonb,
    wordpress_metadata JSONB DEFAULT '{}'::jsonb,
    imported_user_ids JSONB DEFAULT '[]'::jsonb,
    rollback_at TIMESTAMP WITH TIME ZONE,
    rollback_by UUID, -- Will be updated with FK after users table exists

    started_by UUID, -- Will be updated with FK after users table exists
    started_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    completed_at TIMESTAMP WITH TIME ZONE
);

-- Users table
CREATE TABLE IF NOT EXISTS users (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    -- Authentication
    email VARCHAR NOT NULL UNIQUE,
    password_hash VARCHAR NOT NULL,
    email_verified BOOLEAN NOT NULL DEFAULT FALSE,
    email_verification_token VARCHAR UNIQUE,
    email_verification_expires TIMESTAMP WITH TIME ZONE,

    -- Personal Information
    first_name VARCHAR NOT NULL,
    last_name VARCHAR NOT NULL,
    phone VARCHAR,
    address VARCHAR,
    city VARCHAR,
    state VARCHAR(2),
    zipcode VARCHAR(10),
    date_of_birth DATE,

    -- Profile
    profile_photo_url VARCHAR,

    -- Social Media
    social_media_facebook VARCHAR,
    social_media_instagram VARCHAR,
    social_media_twitter VARCHAR,
    social_media_linkedin VARCHAR,

    -- Partner Information
    partner_first_name VARCHAR,
    partner_last_name VARCHAR,
    partner_is_member BOOLEAN DEFAULT FALSE,
    partner_plan_to_become_member BOOLEAN DEFAULT FALSE,

    -- Referral
    referred_by_member_name VARCHAR,
    lead_sources JSONB DEFAULT '[]'::jsonb,

    -- Status & Role
    -- Both the legacy enum column and the new role_id exist during the
    -- dynamic-RBAC transition (see migrate_*_to_dynamic_roles scripts).
    status userstatus NOT NULL DEFAULT 'pending_email',
    role userrole NOT NULL DEFAULT 'guest',
    role_id UUID, -- For dynamic RBAC

    -- Newsletter Preferences
    newsletter_subscribed BOOLEAN DEFAULT TRUE,
    newsletter_publish_name BOOLEAN DEFAULT FALSE NOT NULL,
    newsletter_publish_photo BOOLEAN DEFAULT FALSE NOT NULL,
    newsletter_publish_birthday BOOLEAN DEFAULT FALSE NOT NULL,
    newsletter_publish_none BOOLEAN DEFAULT FALSE NOT NULL,

    -- Volunteer Interests
    volunteer_interests JSONB DEFAULT '[]'::jsonb,

    -- Scholarship Request
    scholarship_requested BOOLEAN DEFAULT FALSE NOT NULL,
    scholarship_reason TEXT,

    -- Directory Settings
    show_in_directory BOOLEAN DEFAULT FALSE NOT NULL,
    directory_email VARCHAR,
    directory_bio TEXT,
    directory_address VARCHAR,
    directory_phone VARCHAR,
    directory_dob DATE,
    directory_partner_name VARCHAR,

    -- Password Reset
    password_reset_token VARCHAR,
    password_reset_expires TIMESTAMP WITH TIME ZONE,
    force_password_change BOOLEAN DEFAULT FALSE NOT NULL,

    -- Terms of Service
    accepts_tos BOOLEAN DEFAULT FALSE NOT NULL,
    tos_accepted_at TIMESTAMP WITH TIME ZONE,

    -- Membership
    member_since DATE,

    -- Reminder Tracking
    email_verification_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    last_email_verification_reminder_at TIMESTAMP WITH TIME ZONE,
    event_attendance_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    last_event_attendance_reminder_at TIMESTAMP WITH TIME ZONE,
    payment_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    last_payment_reminder_at TIMESTAMP WITH TIME ZONE,
    renewal_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    last_renewal_reminder_at TIMESTAMP WITH TIME ZONE,

    -- Rejection Tracking
    rejection_reason TEXT,
    rejected_at TIMESTAMP WITH TIME ZONE,
    rejected_by UUID REFERENCES users(id),

    -- WordPress Import Tracking
    import_source VARCHAR(50),
    import_job_id UUID REFERENCES import_jobs(id),
    wordpress_user_id BIGINT,
    wordpress_registered_date TIMESTAMP WITH TIME ZONE,

    -- Role Change Audit Trail
    role_changed_at TIMESTAMP WITH TIME ZONE,
    role_changed_by UUID REFERENCES users(id) ON DELETE SET NULL,

    -- Timestamps
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Events table
CREATE TABLE IF NOT EXISTS events (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    -- Event Details
    title VARCHAR NOT NULL,
    description TEXT,
    location VARCHAR,
    cover_image_url VARCHAR,

    -- Schedule
    start_at TIMESTAMP WITH TIME ZONE NOT NULL,
    end_at TIMESTAMP WITH TIME ZONE,

    -- Capacity
    capacity INTEGER,
    published BOOLEAN NOT NULL DEFAULT FALSE,

    -- Calendar Integration
    calendar_uid VARCHAR UNIQUE,
    microsoft_calendar_id VARCHAR,
    microsoft_calendar_sync_enabled BOOLEAN DEFAULT FALSE,

    -- Metadata
    created_by UUID REFERENCES users(id),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Event RSVPs table
CREATE TABLE IF NOT EXISTS event_rsvps (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    event_id UUID NOT NULL REFERENCES events(id) ON DELETE CASCADE,
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,

    -- RSVP Details
    rsvp_status rsvpstatus NOT NULL DEFAULT 'maybe',
    attended BOOLEAN DEFAULT FALSE,
    attended_at TIMESTAMP WITH TIME ZONE,

    -- Timestamps
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,

    -- Unique constraint: one RSVP per user per event
    UNIQUE(event_id, user_id)
);

-- Event Gallery table
CREATE TABLE IF NOT EXISTS event_galleries (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    event_id UUID NOT NULL REFERENCES events(id) ON DELETE CASCADE,

    -- Image Details
    image_url VARCHAR NOT NULL,
    caption TEXT,
    order_index INTEGER DEFAULT 0,

    -- Metadata
    uploaded_by UUID REFERENCES users(id),
    uploaded_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

COMMIT;

-- Display progress
SELECT 'Step 2/8 completed: Core tables (users, events, rsvps, gallery) created' AS progress;
|
||||
|
||||
BEGIN;

-- ============================================================================
-- SECTION 3: Create Subscription & Payment Tables
-- ============================================================================
-- All monetary amounts are stored as integer cents to avoid floating-point
-- rounding issues.

-- Subscription Plans table
CREATE TABLE IF NOT EXISTS subscription_plans (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    -- Plan Details
    name VARCHAR NOT NULL,
    description TEXT,
    price_cents INTEGER NOT NULL,
    billing_cycle VARCHAR NOT NULL DEFAULT 'yearly',
    stripe_price_id VARCHAR, -- Legacy, deprecated

    -- Configuration
    active BOOLEAN NOT NULL DEFAULT TRUE,

    -- Custom billing cycle fields (for recurring date ranges like Jan 1 - Dec 31)
    custom_cycle_enabled BOOLEAN DEFAULT FALSE NOT NULL,
    custom_cycle_start_month INTEGER,
    custom_cycle_start_day INTEGER,
    custom_cycle_end_month INTEGER,
    custom_cycle_end_day INTEGER,

    -- Dynamic pricing fields
    minimum_price_cents INTEGER DEFAULT 3000 NOT NULL,
    suggested_price_cents INTEGER,
    allow_donation BOOLEAN DEFAULT TRUE NOT NULL,

    -- Timestamps
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Subscriptions table
CREATE TABLE IF NOT EXISTS subscriptions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    plan_id UUID NOT NULL REFERENCES subscription_plans(id),

    -- Stripe Integration
    stripe_subscription_id VARCHAR,
    stripe_customer_id VARCHAR,

    -- Status & Dates
    status subscriptionstatus DEFAULT 'active',
    start_date TIMESTAMP WITH TIME ZONE NOT NULL,
    end_date TIMESTAMP WITH TIME ZONE,

    -- Payment Details
    -- amount_paid_cents is the total charged; base + donation are the split.
    amount_paid_cents INTEGER,
    base_subscription_cents INTEGER NOT NULL,
    donation_cents INTEGER DEFAULT 0 NOT NULL,

    -- Stripe transaction metadata (for validation and audit)
    stripe_payment_intent_id VARCHAR,
    stripe_charge_id VARCHAR,
    stripe_invoice_id VARCHAR,
    payment_completed_at TIMESTAMP WITH TIME ZONE,
    card_last4 VARCHAR(4),
    card_brand VARCHAR(20),
    stripe_receipt_url VARCHAR,

    -- Manual Payment Support (admin-recorded payments outside Stripe)
    manual_payment BOOLEAN DEFAULT FALSE NOT NULL,
    manual_payment_notes TEXT,
    manual_payment_admin_id UUID REFERENCES users(id),
    manual_payment_date TIMESTAMP WITH TIME ZONE,
    payment_method VARCHAR,

    -- Timestamps
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Donations table
CREATE TABLE IF NOT EXISTS donations (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    -- Donation Details
    amount_cents INTEGER NOT NULL,
    donation_type donationtype NOT NULL DEFAULT 'public',
    status donationstatus NOT NULL DEFAULT 'pending',

    -- Donor Information
    user_id UUID REFERENCES users(id), -- NULL for public donations
    donor_email VARCHAR,
    donor_name VARCHAR,

    -- Payment Details
    stripe_checkout_session_id VARCHAR,
    stripe_payment_intent_id VARCHAR,
    payment_method VARCHAR,

    -- Stripe transaction metadata (for validation and audit)
    stripe_charge_id VARCHAR,
    stripe_customer_id VARCHAR,
    payment_completed_at TIMESTAMP WITH TIME ZONE,
    card_last4 VARCHAR(4),
    card_brand VARCHAR(20),
    stripe_receipt_url VARCHAR,

    -- Metadata
    notes TEXT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE
);

COMMIT;

-- Display progress
SELECT 'Step 3/8 completed: Subscription and donation tables created' AS progress;
|
||||
|
||||
BEGIN;

-- ============================================================================
-- SECTION 4: Create RBAC Tables
-- ============================================================================

-- Permissions table
CREATE TABLE IF NOT EXISTS permissions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    code VARCHAR NOT NULL UNIQUE,
    name VARCHAR NOT NULL,
    description TEXT,
    module VARCHAR NOT NULL,

    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Roles table (for dynamic RBAC)
CREATE TABLE IF NOT EXISTS roles (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    code VARCHAR NOT NULL UNIQUE,
    name VARCHAR NOT NULL,
    description TEXT,
    is_system_role BOOLEAN NOT NULL DEFAULT FALSE,

    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    created_by UUID REFERENCES users(id) ON DELETE SET NULL
);

-- Role Permissions junction table
-- NOTE(review): there is no UNIQUE constraint on (role, permission_id) or
-- (role_id, permission_id) here, so duplicate assignments are representable
-- at the schema level — presumably the source of the duplicate-key
-- investigation elsewhere in this change set; confirm whether a unique
-- index is added by a later migration.
CREATE TABLE IF NOT EXISTS role_permissions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    role userrole, -- Legacy enum-based role (for backward compatibility)
    role_id UUID REFERENCES roles(id) ON DELETE CASCADE, -- Dynamic role
    permission_id UUID NOT NULL REFERENCES permissions(id) ON DELETE CASCADE,

    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    created_by UUID REFERENCES users(id) ON DELETE SET NULL
);

COMMIT;

-- Display progress
SELECT 'Step 4/8 completed: RBAC tables created' AS progress;
|
||||
|
||||
BEGIN;

-- ============================================================================
-- SECTION 5: Create Document Management Tables
-- Three independent archives: newsletters, financial reports, bylaws.
-- ============================================================================

-- Newsletter archive (uploaded PDF/file per issue)
CREATE TABLE IF NOT EXISTS newsletter_archives (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    title VARCHAR NOT NULL,
    file_url VARCHAR NOT NULL,
    file_size_bytes INTEGER,
    issue_date DATE NOT NULL,
    description TEXT,

    uploaded_by UUID REFERENCES users(id),
    uploaded_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Financial reports, keyed by fiscal period (free-form VARCHAR)
CREATE TABLE IF NOT EXISTS financial_reports (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    title VARCHAR NOT NULL,
    file_url VARCHAR NOT NULL,
    file_size_bytes INTEGER,
    fiscal_period VARCHAR NOT NULL,
    report_type VARCHAR,

    uploaded_by UUID REFERENCES users(id),
    uploaded_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Versioned bylaws documents; is_current flags the active version
-- (uniqueness of the current version is not enforced at the DB level)
CREATE TABLE IF NOT EXISTS bylaws_documents (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    title VARCHAR NOT NULL,
    file_url VARCHAR NOT NULL,
    file_size_bytes INTEGER,
    version VARCHAR NOT NULL,
    effective_date DATE NOT NULL,
    description TEXT,
    is_current BOOLEAN DEFAULT TRUE,

    uploaded_by UUID REFERENCES users(id),
    uploaded_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

COMMIT;

-- Progress marker
SELECT 'Step 5/8 completed: Document management tables created' AS progress;
|
||||
|
||||
BEGIN;

-- ============================================================================
-- SECTION 6: Create System Tables
-- Storage accounting, invitation tokens, and import-job audit trail.
-- ============================================================================

-- Single-row storage accounting table (seeded in Section 8)
CREATE TABLE IF NOT EXISTS storage_usage (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    total_bytes_used BIGINT NOT NULL DEFAULT 0,
    max_bytes_allowed BIGINT NOT NULL DEFAULT 1073741824, -- 1 GB
    last_updated TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Email-based invitation tokens with lifecycle timestamps
CREATE TABLE IF NOT EXISTS user_invitations (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),

    email VARCHAR NOT NULL,
    token VARCHAR NOT NULL UNIQUE,
    role userrole NOT NULL,
    status invitationstatus NOT NULL DEFAULT 'pending',

    invited_by UUID REFERENCES users(id) ON DELETE SET NULL,
    expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
    accepted_at TIMESTAMP WITH TIME ZONE,
    revoked_at TIMESTAMP WITH TIME ZONE,

    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- import_jobs was created before users existed; attach its FKs now
ALTER TABLE import_jobs
    ADD CONSTRAINT fk_import_jobs_rollback_by FOREIGN KEY (rollback_by) REFERENCES users(id),
    ADD CONSTRAINT fk_import_jobs_started_by FOREIGN KEY (started_by) REFERENCES users(id);

-- Audit trail of import rollbacks; deleted_user_ids keeps the full ID list
CREATE TABLE IF NOT EXISTS import_rollback_audit (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    import_job_id UUID NOT NULL REFERENCES import_jobs(id),
    rolled_back_by UUID NOT NULL REFERENCES users(id),
    rolled_back_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    deleted_user_count INTEGER NOT NULL,
    deleted_user_ids JSONB NOT NULL,
    reason TEXT,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

COMMIT;

-- Progress marker
SELECT 'Step 6/8 completed: System tables created' AS progress;
|
||||
|
||||
BEGIN;

-- ============================================================================
-- SECTION 7: Create Indexes
-- All idempotent (IF NOT EXISTS); partial indexes skip NULL-heavy columns.
-- ============================================================================

-- users: lookup by identity, moderation state, and import provenance
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
CREATE INDEX IF NOT EXISTS idx_users_status ON users(status);
CREATE INDEX IF NOT EXISTS idx_users_role ON users(role);
CREATE INDEX IF NOT EXISTS idx_users_role_id ON users(role_id);
CREATE INDEX IF NOT EXISTS idx_users_email_verified ON users(email_verified);
CREATE INDEX IF NOT EXISTS idx_users_rejected_at ON users(rejected_at) WHERE rejected_at IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_users_created_at ON users(created_at);
CREATE INDEX IF NOT EXISTS idx_users_import_job ON users(import_job_id) WHERE import_job_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_users_import_source ON users(import_source) WHERE import_source IS NOT NULL;

-- events
CREATE INDEX IF NOT EXISTS idx_events_created_by ON events(created_by);
CREATE INDEX IF NOT EXISTS idx_events_start_at ON events(start_at);
CREATE INDEX IF NOT EXISTS idx_events_published ON events(published);

-- event_rsvps
CREATE INDEX IF NOT EXISTS idx_event_rsvps_event_id ON event_rsvps(event_id);
CREATE INDEX IF NOT EXISTS idx_event_rsvps_user_id ON event_rsvps(user_id);
CREATE INDEX IF NOT EXISTS idx_event_rsvps_rsvp_status ON event_rsvps(rsvp_status);

-- event_galleries
CREATE INDEX IF NOT EXISTS idx_event_galleries_event_id ON event_galleries(event_id);

-- subscriptions: app-side lookups plus Stripe-ID reverse lookups (webhooks)
CREATE INDEX IF NOT EXISTS idx_subscriptions_user_id ON subscriptions(user_id);
CREATE INDEX IF NOT EXISTS idx_subscriptions_plan_id ON subscriptions(plan_id);
CREATE INDEX IF NOT EXISTS idx_subscriptions_status ON subscriptions(status);
CREATE INDEX IF NOT EXISTS idx_subscriptions_stripe_subscription_id ON subscriptions(stripe_subscription_id);
CREATE INDEX IF NOT EXISTS idx_subscriptions_payment_intent ON subscriptions(stripe_payment_intent_id);
CREATE INDEX IF NOT EXISTS idx_subscriptions_charge_id ON subscriptions(stripe_charge_id);
CREATE INDEX IF NOT EXISTS idx_subscriptions_invoice_id ON subscriptions(stripe_invoice_id);

-- donations: same pattern as subscriptions
CREATE INDEX IF NOT EXISTS idx_donation_user ON donations(user_id);
CREATE INDEX IF NOT EXISTS idx_donation_type ON donations(donation_type);
CREATE INDEX IF NOT EXISTS idx_donation_status ON donations(status);
CREATE INDEX IF NOT EXISTS idx_donation_created ON donations(created_at);
CREATE INDEX IF NOT EXISTS idx_donation_payment_intent ON donations(stripe_payment_intent_id);
CREATE INDEX IF NOT EXISTS idx_donation_charge_id ON donations(stripe_charge_id);
CREATE INDEX IF NOT EXISTS idx_donation_customer_id ON donations(stripe_customer_id);

-- import_jobs
CREATE INDEX IF NOT EXISTS idx_import_jobs_status ON import_jobs(status);
CREATE INDEX IF NOT EXISTS idx_import_jobs_started_by ON import_jobs(started_by);

-- import_rollback_audit (DESC: listings show most recent rollback first)
CREATE INDEX IF NOT EXISTS idx_rollback_audit_import_job ON import_rollback_audit(import_job_id);
CREATE INDEX IF NOT EXISTS idx_rollback_audit_rolled_back_at ON import_rollback_audit(rolled_back_at DESC);

-- permissions
CREATE INDEX IF NOT EXISTS idx_permissions_code ON permissions(code);
CREATE INDEX IF NOT EXISTS idx_permissions_module ON permissions(module);

-- roles
CREATE INDEX IF NOT EXISTS idx_roles_code ON roles(code);
CREATE INDEX IF NOT EXISTS idx_roles_is_system_role ON roles(is_system_role);

-- role_permissions: partial unique indexes enforce one mapping per
-- (role, permission) pair for each of the two addressing schemes
CREATE INDEX IF NOT EXISTS idx_role_permissions_role ON role_permissions(role);
CREATE INDEX IF NOT EXISTS idx_role_permissions_role_id ON role_permissions(role_id);
CREATE UNIQUE INDEX IF NOT EXISTS idx_role_permission ON role_permissions(role, permission_id) WHERE role IS NOT NULL;
CREATE UNIQUE INDEX IF NOT EXISTS idx_dynamic_role_permission ON role_permissions(role_id, permission_id) WHERE role_id IS NOT NULL;

COMMIT;

-- Progress marker
SELECT 'Step 7/8 completed: Indexes created' AS progress;
|
||||
|
||||
BEGIN;

-- ============================================================================
-- SECTION 8: Initialize Default Data
-- ============================================================================

-- Seed the singleton storage_usage row; the NOT EXISTS guard keeps this
-- idempotent on re-runs.
INSERT INTO storage_usage (id, total_bytes_used, max_bytes_allowed, last_updated)
SELECT
    gen_random_uuid(),
    0,
    1073741824, -- 1 GB
    CURRENT_TIMESTAMP
WHERE NOT EXISTS (SELECT 1 FROM storage_usage);

COMMIT;

-- Progress marker
SELECT 'Step 8/8 completed: Default data initialized' AS progress;
|
||||
|
||||
-- ============================================================================
-- Migration Complete
-- Final banner printed to the console (a SELECT so it shows under any client).
-- ============================================================================

SELECT '
================================================================================
✅ Migration 000 completed successfully!
================================================================================

Database schema initialized with:
- 10 ENUM types
- 17 tables (users, events, subscriptions, donations, RBAC, documents, system)
- 30+ indexes for performance
- 1 storage usage record

Next steps:
1. Run: python seed_permissions_rbac.py (to populate permissions and roles)
2. Run: python create_admin.py (to create superadmin user)
3. Run remaining migrations in sequence (001-010)

Database is ready for LOAF membership platform! 🎉
================================================================================
' AS migration_complete;
|
||||
20
migrations/001_add_member_since_field.sql
Normal file
20
migrations/001_add_member_since_field.sql
Normal file
@@ -0,0 +1,20 @@
|
||||
-- Migration: Add member_since field to users table
--
-- This field allows admins to manually set historical membership dates
-- for users imported from the old WordPress site.
--
-- For new users, it can be left NULL and will default to created_at when displayed.
-- For imported users, admins can set it to the actual date they became a member.
--
-- Idempotent: IF NOT EXISTS makes the ALTER safe to re-run (consistent with
-- migration 000's use of IF NOT EXISTS throughout), and the backfill only
-- touches rows still NULL, so re-runs never overwrite admin-set dates.

-- Add member_since column (nullable timestamp with timezone)
ALTER TABLE users
ADD COLUMN IF NOT EXISTS member_since TIMESTAMP WITH TIME ZONE;

-- Backfill existing active members: use created_at as default.
-- This is reasonable since they became members when they created their account.
UPDATE users
SET member_since = created_at
WHERE status = 'active' AND member_since IS NULL;

-- Success message
SELECT 'Migration completed: member_since field added to users table' AS result;
|
||||
59
migrations/002_rename_approval_to_validation.sql
Normal file
59
migrations/002_rename_approval_to_validation.sql
Normal file
@@ -0,0 +1,59 @@
|
||||
-- Migration: Rename approval terminology to validation in database
--
-- Updates all user status values from:
--   - pending_approval → pending_validation
--   - pre_approved → pre_validated
--
-- This migration aligns with the client's request to change all "approval"
-- terminology to "validation" throughout the application.
--
-- IMPORTANT: This migration uses multiple transactions because PostgreSQL
-- requires enum values to be committed before they can be used.

-- ============================================================
-- TRANSACTION 1: Add new enum values
-- (no explicit BEGIN: ALTER TYPE ... ADD VALUE cannot run inside an
--  explicit transaction block; the COMMIT below ends any implicit
--  transaction a client driver may have opened)
-- ============================================================

-- Renamed values (approval → validation)
ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'pending_validation';
ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'pre_validated';

-- New status types from Phase 4 (no-ops if already present)
ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'canceled';
ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'expired';
ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'abandoned';

-- Commit so the new enum values are usable below
COMMIT;

-- Progress marker
SELECT 'Step 1 completed: New enum values added' AS progress;

-- ============================================================
-- TRANSACTION 2: Update existing data
-- ============================================================

BEGIN;

-- pending_approval → pending_validation
UPDATE users
SET status = 'pending_validation'
WHERE status = 'pending_approval';

-- pre_approved → pre_validated
UPDATE users
SET status = 'pre_validated'
WHERE status = 'pre_approved';

COMMIT;

-- Success message
SELECT 'Migration completed: approval terminology updated to validation' AS result;

-- Note: All API endpoints and frontend components must also be updated
-- to use 'validation' terminology instead of 'approval'
--
-- Note: The old enum values 'pending_approval' and 'pre_approved' will remain
-- in the enum type but will not be used. This is normal PostgreSQL behavior.
|
||||
23
migrations/003_add_tos_acceptance.sql
Normal file
23
migrations/003_add_tos_acceptance.sql
Normal file
@@ -0,0 +1,23 @@
|
||||
-- Migration: Add Terms of Service acceptance fields to users table
--
-- This migration adds:
--   - accepts_tos: Boolean field to track ToS acceptance
--   - tos_accepted_at: Timestamp of when user accepted ToS
--
-- Idempotent: IF NOT EXISTS makes the ALTERs safe to re-run, and the
-- backfill is guarded by "accepts_tos = FALSE" so a re-run cannot
-- overwrite a real acceptance timestamp with created_at. On a first run
-- the guard is a no-op, because every row has just received the FALSE
-- default.

-- Add accepts_tos column (Boolean, default False)
ALTER TABLE users
ADD COLUMN IF NOT EXISTS accepts_tos BOOLEAN DEFAULT FALSE NOT NULL;

-- Add tos_accepted_at column (nullable timestamp)
ALTER TABLE users
ADD COLUMN IF NOT EXISTS tos_accepted_at TIMESTAMP WITH TIME ZONE;

-- Backfill existing users: mark as accepted with created_at date.
-- This is reasonable since existing users registered before the ToS requirement.
UPDATE users
SET accepts_tos = TRUE,
    tos_accepted_at = created_at
WHERE created_at IS NOT NULL
  AND accepts_tos = FALSE;  -- re-run safety: never clobber a real acceptance

-- Success message
SELECT 'Migration completed: ToS acceptance fields added to users table' AS result;
|
||||
39
migrations/004_add_reminder_tracking_fields.sql
Normal file
39
migrations/004_add_reminder_tracking_fields.sql
Normal file
@@ -0,0 +1,39 @@
|
||||
-- Migration: Add Reminder Tracking Fields to User Model
--
-- This migration adds fields to track reminder emails sent to users,
-- allowing admins to see how many reminders each user has received
-- and when the last reminder was sent.
--
-- This is especially helpful for older members who may need personal outreach.
--
-- Changes from the original version:
--   * IF NOT EXISTS on every column, so the migration is safe to re-run
--     (consistent with migration 000's IF NOT EXISTS convention).
--   * The eight separate ALTER TABLE statements are merged into one, so
--     the schema change is atomic: either all reminder columns exist
--     afterwards or none do.
ALTER TABLE users
    -- Email verification reminders
    ADD COLUMN IF NOT EXISTS email_verification_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    ADD COLUMN IF NOT EXISTS last_email_verification_reminder_at TIMESTAMP WITH TIME ZONE,
    -- Event attendance reminders
    ADD COLUMN IF NOT EXISTS event_attendance_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    ADD COLUMN IF NOT EXISTS last_event_attendance_reminder_at TIMESTAMP WITH TIME ZONE,
    -- Payment reminders
    ADD COLUMN IF NOT EXISTS payment_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    ADD COLUMN IF NOT EXISTS last_payment_reminder_at TIMESTAMP WITH TIME ZONE,
    -- Renewal reminders
    ADD COLUMN IF NOT EXISTS renewal_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    ADD COLUMN IF NOT EXISTS last_renewal_reminder_at TIMESTAMP WITH TIME ZONE;

-- Success message
SELECT 'Migration completed: Reminder tracking fields added to users table' AS result;
SELECT 'Admins can now track reminder counts in the dashboard' AS note;
|
||||
172
migrations/004_fix_all_permissions.sql
Normal file
172
migrations/004_fix_all_permissions.sql
Normal file
@@ -0,0 +1,172 @@
|
||||
-- ============================================================================
-- Fix All Permission Codes to Match Backend Code
-- This migration adds all missing permissions that the code actually checks for
-- ============================================================================

BEGIN;

-- ----------------------------------------------------------------------------
-- Wipe the old (incorrect) permission catalogue and its role mappings.
-- DESTRUCTIVE: role_permissions rows are deleted first to satisfy the FK.
-- Everything is rebuilt below inside this same transaction.
-- ----------------------------------------------------------------------------
DELETE FROM role_permissions;
DELETE FROM permissions;

-- ----------------------------------------------------------------------------
-- Recreate every permission code the backend actually checks for.
-- ON CONFLICT (code) DO NOTHING keeps this insert re-runnable.
-- ----------------------------------------------------------------------------
INSERT INTO permissions (id, code, name, description, module, created_at)
VALUES
    -- users module
    (gen_random_uuid(), 'users.view', 'View Users', 'View user list and profiles', 'users', NOW()),
    (gen_random_uuid(), 'users.create', 'Create Users', 'Create new users', 'users', NOW()),
    (gen_random_uuid(), 'users.edit', 'Edit Users', 'Edit user information', 'users', NOW()),
    (gen_random_uuid(), 'users.approve', 'Approve Users', 'Approve pending memberships', 'users', NOW()),
    (gen_random_uuid(), 'users.import', 'Import Users', 'Import users from CSV', 'users', NOW()),
    (gen_random_uuid(), 'users.export', 'Export Users', 'Export users to CSV', 'users', NOW()),
    (gen_random_uuid(), 'users.status', 'Change User Status', 'Update user status', 'users', NOW()),
    (gen_random_uuid(), 'users.reset_password', 'Reset User Password', 'Reset user passwords', 'users', NOW()),
    (gen_random_uuid(), 'users.resend_verification', 'Resend Verification', 'Resend email verification', 'users', NOW()),
    -- events module
    (gen_random_uuid(), 'events.view', 'View Events', 'View event list', 'events', NOW()),
    (gen_random_uuid(), 'events.create', 'Create Events', 'Create new events', 'events', NOW()),
    (gen_random_uuid(), 'events.edit', 'Edit Events', 'Edit event information', 'events', NOW()),
    (gen_random_uuid(), 'events.delete', 'Delete Events', 'Delete events', 'events', NOW()),
    (gen_random_uuid(), 'events.rsvps', 'View RSVPs', 'View event RSVPs', 'events', NOW()),
    (gen_random_uuid(), 'events.attendance', 'Manage Attendance', 'Mark attendance', 'events', NOW()),
    -- gallery module
    (gen_random_uuid(), 'gallery.upload', 'Upload Photos', 'Upload event photos', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.edit', 'Edit Gallery', 'Edit photo captions', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.delete', 'Delete Photos', 'Delete event photos', 'gallery', NOW()),
    -- subscriptions module
    (gen_random_uuid(), 'subscriptions.view', 'View Subscriptions', 'View user subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.plans', 'Manage Plans', 'Manage subscription plans', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.edit', 'Edit Subscriptions', 'Edit user subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.cancel', 'Cancel Subscriptions', 'Cancel subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.activate', 'Activate Subscriptions', 'Manually activate subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.export', 'Export Subscriptions', 'Export subscription data', 'subscriptions', NOW()),
    -- donations module
    (gen_random_uuid(), 'donations.view', 'View Donations', 'View donation records', 'donations', NOW()),
    (gen_random_uuid(), 'donations.export', 'Export Donations', 'Export donation data', 'donations', NOW()),
    -- financials module (financial reports)
    (gen_random_uuid(), 'financials.create', 'Create Financial Reports', 'Upload financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.edit', 'Edit Financial Reports', 'Edit financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.delete', 'Delete Financial Reports', 'Delete financial reports', 'financials', NOW()),
    -- newsletters module
    (gen_random_uuid(), 'newsletters.create', 'Create Newsletters', 'Upload newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.edit', 'Edit Newsletters', 'Edit newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.delete', 'Delete Newsletters', 'Delete newsletter archives', 'newsletters', NOW()),
    -- bylaws module
    (gen_random_uuid(), 'bylaws.create', 'Create Bylaws', 'Upload bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.edit', 'Edit Bylaws', 'Edit bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.delete', 'Delete Bylaws', 'Delete bylaws documents', 'bylaws', NOW()),
    -- settings module
    (gen_random_uuid(), 'settings.storage', 'View Storage Usage', 'View storage usage statistics', 'settings', NOW())
ON CONFLICT (code) DO NOTHING;

-- ----------------------------------------------------------------------------
-- Assign permissions to roles. Each insert writes both the legacy enum
-- column ('role') and the dynamic role_id (looked up by code; NULL when
-- the roles table has not been seeded yet).
--
-- Guest role: no permissions (members view their own data via other endpoints)
-- ----------------------------------------------------------------------------

-- Member role: basic viewing only
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'member',
    (SELECT id FROM roles WHERE code = 'member'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    'events.view'
)
ON CONFLICT DO NOTHING;

-- Admin role: full management except financial
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'admin',
    (SELECT id FROM roles WHERE code = 'admin'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    -- user management
    'users.view', 'users.create', 'users.edit', 'users.approve', 'users.import',
    'users.export', 'users.status', 'users.reset_password', 'users.resend_verification',
    -- event management
    'events.view', 'events.create', 'events.edit', 'events.delete', 'events.rsvps', 'events.attendance',
    -- gallery
    'gallery.upload', 'gallery.edit', 'gallery.delete',
    -- content
    'newsletters.create', 'newsletters.edit', 'newsletters.delete',
    'bylaws.create', 'bylaws.edit', 'bylaws.delete',
    -- settings
    'settings.storage'
)
ON CONFLICT DO NOTHING;

-- Finance role: financial permissions plus basic viewing
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'finance',
    (SELECT id FROM roles WHERE code = 'finance'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    -- subscriptions & donations
    'subscriptions.view', 'subscriptions.plans', 'subscriptions.edit',
    'subscriptions.cancel', 'subscriptions.activate', 'subscriptions.export',
    'donations.view', 'donations.export',
    -- financial reports
    'financials.create', 'financials.edit', 'financials.delete',
    -- basic access
    'users.view',
    'events.view'
)
ON CONFLICT DO NOTHING;

-- Superadmin role: every permission (no WHERE filter)
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'superadmin',
    (SELECT id FROM roles WHERE code = 'superadmin'),
    p.id,
    NOW()
FROM permissions p
ON CONFLICT DO NOTHING;

COMMIT;

\echo '✅ All permissions fixed!'
\echo ''
\echo 'Permission counts by role:'
\echo '  - Guest: 0'
\echo '  - Member: 1'
\echo '  - Admin: ~25'
\echo '  - Finance: ~13'
\echo '  - Superadmin: ALL (40 total)'
\echo ''
\echo 'Next: Restart backend with: pm2 restart membership-backend'
|
||||
187
migrations/005_add_rbac_and_invitations.sql
Normal file
187
migrations/005_add_rbac_and_invitations.sql
Normal file
@@ -0,0 +1,187 @@
|
||||
-- Migration 005: Add RBAC Permission Management, User Invitations, and Import Jobs
--
-- This migration adds:
-- 1. Superadmin role to UserRole enum
-- 2. Permission and RolePermission tables for RBAC
-- 3. UserInvitation table for email-based invitations
-- 4. ImportJob table for CSV import tracking
--
-- IMPORTANT: PostgreSQL requires enum values to be committed before they can be used,
-- so this migration uses multiple transactions.

-- ============================================================
-- TRANSACTION 1: Add new enum values
-- (no explicit BEGIN: ALTER TYPE ... ADD VALUE cannot run inside an
--  explicit transaction block)
-- ============================================================

ALTER TYPE userrole ADD VALUE IF NOT EXISTS 'superadmin';

COMMIT;

-- Progress marker
SELECT 'Step 1 completed: UserRole enum updated with superadmin' AS progress;

-- ============================================================
-- TRANSACTION 2: Create new enum types
-- (DO-block + duplicate_object handler makes each CREATE TYPE idempotent)
-- ============================================================

BEGIN;

DO $$ BEGIN
    CREATE TYPE invitationstatus AS ENUM ('pending', 'accepted', 'expired', 'revoked');
EXCEPTION
    WHEN duplicate_object THEN null;
END $$;

DO $$ BEGIN
    CREATE TYPE importjobstatus AS ENUM ('processing', 'completed', 'failed', 'partial');
EXCEPTION
    WHEN duplicate_object THEN null;
END $$;

COMMIT;

-- Progress marker
SELECT 'Step 2 completed: New enum types created' AS progress;

-- ============================================================
-- TRANSACTION 3: Create Permission and RolePermission tables
-- ============================================================

BEGIN;

-- Permission catalogue
CREATE TABLE IF NOT EXISTS permissions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    code VARCHAR NOT NULL UNIQUE,
    name VARCHAR NOT NULL,
    description TEXT,
    module VARCHAR NOT NULL,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_permissions_code ON permissions(code);
CREATE INDEX IF NOT EXISTS idx_permissions_module ON permissions(module);

-- Role -> permission junction (enum-based roles at this point in history)
CREATE TABLE IF NOT EXISTS role_permissions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    role userrole NOT NULL,
    permission_id UUID NOT NULL REFERENCES permissions(id) ON DELETE CASCADE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    created_by UUID REFERENCES users(id) ON DELETE SET NULL
);

CREATE INDEX IF NOT EXISTS idx_role_permissions_role ON role_permissions(role);
CREATE UNIQUE INDEX IF NOT EXISTS idx_role_permission ON role_permissions(role, permission_id);

COMMIT;

-- Progress marker
SELECT 'Step 3 completed: Permission tables created' AS progress;
|
||||
|
||||
-- ------------------------------------------------------------
-- Step 4: email-based user invitations
-- ------------------------------------------------------------

BEGIN;

-- One row per invitation sent; the unique token is what the invite link carries.
CREATE TABLE IF NOT EXISTS user_invitations (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    email VARCHAR NOT NULL,
    token VARCHAR NOT NULL UNIQUE,
    role userrole NOT NULL,
    status invitationstatus NOT NULL DEFAULT 'pending',

    -- Optional pre-filled profile information for the invitee
    first_name VARCHAR,
    last_name VARCHAR,
    phone VARCHAR,

    -- Who sent it, when it expires, and who eventually accepted it
    invited_by UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    invited_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
    expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
    accepted_at TIMESTAMP WITH TIME ZONE,
    accepted_by UUID REFERENCES users(id) ON DELETE SET NULL
);

CREATE INDEX IF NOT EXISTS idx_user_invitations_email ON user_invitations(email);
CREATE INDEX IF NOT EXISTS idx_user_invitations_token ON user_invitations(token);
CREATE INDEX IF NOT EXISTS idx_user_invitations_status ON user_invitations(status);

COMMIT;

SELECT 'Step 4 completed: UserInvitation table created' AS progress;
|
||||
|
||||
-- ------------------------------------------------------------
-- Step 5: CSV import job tracking
-- ------------------------------------------------------------

BEGIN;

-- One row per CSV import run, with per-row progress counters and a JSONB
-- array of row-level errors.
CREATE TABLE IF NOT EXISTS import_jobs (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    filename VARCHAR NOT NULL,
    file_key VARCHAR,
    total_rows INTEGER NOT NULL,
    processed_rows INTEGER NOT NULL DEFAULT 0,
    successful_rows INTEGER NOT NULL DEFAULT 0,
    failed_rows INTEGER NOT NULL DEFAULT 0,
    status importjobstatus NOT NULL DEFAULT 'processing',
    errors JSONB NOT NULL DEFAULT '[]'::jsonb,

    -- Who started the import and when it began/finished
    imported_by UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    started_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
    completed_at TIMESTAMP WITH TIME ZONE
);

CREATE INDEX IF NOT EXISTS idx_import_jobs_imported_by ON import_jobs(imported_by);
CREATE INDEX IF NOT EXISTS idx_import_jobs_status ON import_jobs(status);
CREATE INDEX IF NOT EXISTS idx_import_jobs_started_at ON import_jobs(started_at DESC);

COMMIT;

SELECT 'Migration 005 completed successfully: RBAC, Invitations, and Import Jobs tables created' AS result;
|
||||
|
||||
-- ------------------------------------------------------------
-- Post-migration sanity checks
-- ------------------------------------------------------------

-- The userrole enum should now list 'superadmin' among its labels.
SELECT enumlabel FROM pg_enum
WHERE enumtypid = 'userrole'::regtype
ORDER BY enumlabel;

-- All four new tables should be present in the public schema.
SELECT table_name FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name IN ('permissions', 'role_permissions', 'user_invitations', 'import_jobs')
ORDER BY table_name;
|
||||
|
||||
-- ============================================================
|
||||
-- Rollback Instructions (if needed)
|
||||
-- ============================================================
|
||||
|
||||
-- To rollback this migration, run:
|
||||
--
|
||||
-- DROP TABLE IF EXISTS import_jobs CASCADE;
|
||||
-- DROP TABLE IF EXISTS user_invitations CASCADE;
|
||||
-- DROP TABLE IF EXISTS role_permissions CASCADE;
|
||||
-- DROP TABLE IF EXISTS permissions CASCADE;
|
||||
-- DROP TYPE IF EXISTS importjobstatus;
|
||||
-- DROP TYPE IF EXISTS invitationstatus;
|
||||
--
|
||||
-- Note: Cannot remove 'superadmin' from UserRole enum without recreating the entire enum
|
||||
-- and updating all dependent tables. Only do this if no users have the superadmin role.
|
||||
216
migrations/005_complete_permissions.sql
Normal file
216
migrations/005_complete_permissions.sql
Normal file
@@ -0,0 +1,216 @@
|
||||
-- ============================================================================
-- Complete Permission Set (60 permissions from development)
-- Run this to sync production with development permissions
--
-- PREREQUISITE: migration 006 (dynamic roles) must already be applied — the
-- role-assignment inserts in this file write role_permissions.role_id and
-- look up ids in the roles table, both of which are created by migration 006.
-- ============================================================================

BEGIN;

-- Fail fast with an actionable message instead of a confusing error
-- halfway through the transaction when migration 006 is missing.
DO $$
BEGIN
    IF to_regclass('roles') IS NULL THEN
        RAISE EXCEPTION 'Table "roles" not found: apply migration 006_add_dynamic_roles.sql before running this script';
    END IF;
END $$;

-- Start from a clean slate: delete mappings first (they reference permissions),
-- then the permissions themselves.
DELETE FROM role_permissions;
DELETE FROM permissions;
|
||||
|
||||
-- ----------------------------------------------------------------------------
-- Seed all 60 permissions (mirrors the development environment).
-- ON CONFLICT (code) makes the insert a no-op for any code that already exists.
-- ----------------------------------------------------------------------------

INSERT INTO permissions (id, code, name, description, module, created_at)
VALUES
    -- users module (11)
    (gen_random_uuid(), 'users.view', 'View Users', 'View user list and profiles', 'users', NOW()),
    (gen_random_uuid(), 'users.create', 'Create Users', 'Create new users', 'users', NOW()),
    (gen_random_uuid(), 'users.edit', 'Edit Users', 'Edit user information', 'users', NOW()),
    (gen_random_uuid(), 'users.delete', 'Delete Users', 'Delete users', 'users', NOW()),
    (gen_random_uuid(), 'users.status', 'Change User Status', 'Update user status', 'users', NOW()),
    (gen_random_uuid(), 'users.approve', 'Approve Users', 'Approve pending memberships', 'users', NOW()),
    (gen_random_uuid(), 'users.export', 'Export Users', 'Export users to CSV', 'users', NOW()),
    (gen_random_uuid(), 'users.import', 'Import Users', 'Import users from CSV', 'users', NOW()),
    (gen_random_uuid(), 'users.reset_password', 'Reset User Password', 'Reset user passwords', 'users', NOW()),
    (gen_random_uuid(), 'users.resend_verification', 'Resend Verification', 'Resend email verification', 'users', NOW()),
    (gen_random_uuid(), 'users.invite', 'Invite Users', 'Send user invitations', 'users', NOW()),

    -- events module (8)
    (gen_random_uuid(), 'events.view', 'View Events', 'View event list', 'events', NOW()),
    (gen_random_uuid(), 'events.create', 'Create Events', 'Create new events', 'events', NOW()),
    (gen_random_uuid(), 'events.edit', 'Edit Events', 'Edit event information', 'events', NOW()),
    (gen_random_uuid(), 'events.delete', 'Delete Events', 'Delete events', 'events', NOW()),
    (gen_random_uuid(), 'events.publish', 'Publish Events', 'Publish/unpublish events', 'events', NOW()),
    (gen_random_uuid(), 'events.attendance', 'Manage Attendance', 'Mark attendance', 'events', NOW()),
    (gen_random_uuid(), 'events.rsvps', 'View RSVPs', 'View event RSVPs', 'events', NOW()),
    (gen_random_uuid(), 'events.calendar_export', 'Export Calendar', 'Export events to calendar', 'events', NOW()),

    -- subscriptions module (7)
    (gen_random_uuid(), 'subscriptions.view', 'View Subscriptions', 'View user subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.create', 'Create Subscriptions', 'Create new subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.edit', 'Edit Subscriptions', 'Edit user subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.cancel', 'Cancel Subscriptions', 'Cancel subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.activate', 'Activate Subscriptions', 'Manually activate subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.plans', 'Manage Plans', 'Manage subscription plans', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.export', 'Export Subscriptions', 'Export subscription data', 'subscriptions', NOW()),

    -- donations module (2)
    (gen_random_uuid(), 'donations.view', 'View Donations', 'View donation records', 'donations', NOW()),
    (gen_random_uuid(), 'donations.export', 'Export Donations', 'Export donation data', 'donations', NOW()),

    -- financials module (6)
    (gen_random_uuid(), 'financials.view', 'View Financial Reports', 'View financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.create', 'Create Financial Reports', 'Upload financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.edit', 'Edit Financial Reports', 'Edit financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.delete', 'Delete Financial Reports', 'Delete financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.export', 'Export Financial Data', 'Export financial data', 'financials', NOW()),
    (gen_random_uuid(), 'financials.payments', 'Manage Payments', 'Process manual payments', 'financials', NOW()),

    -- newsletters module (6)
    (gen_random_uuid(), 'newsletters.view', 'View Newsletters', 'View newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.create', 'Create Newsletters', 'Upload newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.edit', 'Edit Newsletters', 'Edit newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.delete', 'Delete Newsletters', 'Delete newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.send', 'Send Newsletters', 'Send newsletters to subscribers', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.subscribers', 'Manage Subscribers', 'Manage newsletter subscribers', 'newsletters', NOW()),

    -- bylaws module (5)
    (gen_random_uuid(), 'bylaws.view', 'View Bylaws', 'View bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.create', 'Create Bylaws', 'Upload bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.edit', 'Edit Bylaws', 'Edit bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.delete', 'Delete Bylaws', 'Delete bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.publish', 'Publish Bylaws', 'Mark bylaws as current', 'bylaws', NOW()),

    -- gallery module (5)
    (gen_random_uuid(), 'gallery.view', 'View Gallery', 'View event galleries', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.upload', 'Upload Photos', 'Upload event photos', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.edit', 'Edit Gallery', 'Edit photo captions', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.delete', 'Delete Photos', 'Delete event photos', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.moderate', 'Moderate Gallery', 'Approve/reject gallery submissions', 'gallery', NOW()),

    -- settings module (6)
    (gen_random_uuid(), 'settings.view', 'View Settings', 'View system settings', 'settings', NOW()),
    (gen_random_uuid(), 'settings.edit', 'Edit Settings', 'Edit system settings', 'settings', NOW()),
    (gen_random_uuid(), 'settings.email_templates', 'Manage Email Templates', 'Edit email templates', 'settings', NOW()),
    (gen_random_uuid(), 'settings.storage', 'View Storage Usage', 'View storage usage statistics', 'settings', NOW()),
    (gen_random_uuid(), 'settings.backup', 'Backup System', 'Create system backups', 'settings', NOW()),
    (gen_random_uuid(), 'settings.logs', 'View Logs', 'View system logs', 'settings', NOW()),

    -- permissions management module (4)
    (gen_random_uuid(), 'permissions.view', 'View Permissions', 'View permission list', 'permissions', NOW()),
    (gen_random_uuid(), 'permissions.assign', 'Assign Permissions', 'Assign permissions to roles', 'permissions', NOW()),
    (gen_random_uuid(), 'permissions.manage_roles', 'Manage Roles', 'Create/edit roles', 'permissions', NOW()),
    (gen_random_uuid(), 'permissions.audit', 'View Audit Logs', 'View permission audit logs', 'permissions', NOW())

ON CONFLICT (code) DO NOTHING;
|
||||
|
||||
-- ----------------------------------------------------------------------------
-- Map permissions onto roles.
-- Each insert resolves permission ids by code; the bare ON CONFLICT DO NOTHING
-- skips any (role, permission) pair that is already mapped.
-- Guest gets no permissions at all, so it has no insert here.
-- ----------------------------------------------------------------------------

-- Member: read-only access to member-facing content.
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT gen_random_uuid(), 'member', (SELECT id FROM roles WHERE code = 'member'), p.id, NOW()
FROM permissions p
WHERE p.code IN (
    'events.view',
    'gallery.view',
    'bylaws.view',
    'newsletters.view'
)
ON CONFLICT DO NOTHING;

-- Admin: everything except financials and permissions management.
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT gen_random_uuid(), 'admin', (SELECT id FROM roles WHERE code = 'admin'), p.id, NOW()
FROM permissions p
WHERE p.code IN (
    -- user management
    'users.view', 'users.create', 'users.edit', 'users.approve', 'users.import',
    'users.export', 'users.status', 'users.reset_password', 'users.resend_verification', 'users.invite',
    -- event management
    'events.view', 'events.create', 'events.edit', 'events.delete', 'events.publish',
    'events.rsvps', 'events.attendance', 'events.calendar_export',
    -- gallery
    'gallery.view', 'gallery.upload', 'gallery.edit', 'gallery.delete', 'gallery.moderate',
    -- content
    'newsletters.view', 'newsletters.create', 'newsletters.edit', 'newsletters.delete',
    'newsletters.send', 'newsletters.subscribers',
    'bylaws.view', 'bylaws.create', 'bylaws.edit', 'bylaws.delete', 'bylaws.publish',
    -- limited settings access
    'settings.view', 'settings.storage', 'settings.logs'
)
ON CONFLICT DO NOTHING;

-- Finance: money-related modules plus basic read access elsewhere.
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT gen_random_uuid(), 'finance', (SELECT id FROM roles WHERE code = 'finance'), p.id, NOW()
FROM permissions p
WHERE p.code IN (
    -- subscriptions & donations
    'subscriptions.view', 'subscriptions.create', 'subscriptions.plans', 'subscriptions.edit',
    'subscriptions.cancel', 'subscriptions.activate', 'subscriptions.export',
    'donations.view', 'donations.export',
    -- financial reports
    'financials.view', 'financials.create', 'financials.edit', 'financials.delete',
    'financials.export', 'financials.payments',
    -- basic read access
    'users.view',
    'events.view',
    'bylaws.view',
    'newsletters.view'
)
ON CONFLICT DO NOTHING;

-- Superadmin: every permission in the catalogue (no WHERE clause).
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT gen_random_uuid(), 'superadmin', (SELECT id FROM roles WHERE code = 'superadmin'), p.id, NOW()
FROM permissions p
ON CONFLICT DO NOTHING;

COMMIT;

\echo '✅ Complete permission set created!'
\echo ''
\echo 'Permission counts:'
\echo ' Total permissions: 60'
\echo ' - users: 11'
\echo ' - events: 8'
\echo ' - subscriptions: 7'
\echo ' - donations: 2'
\echo ' - financials: 6'
\echo ' - newsletters: 6'
\echo ' - bylaws: 5'
\echo ' - gallery: 5'
\echo ' - settings: 6'
\echo ' - permissions: 4'
\echo ''
\echo 'Role assignments:'
\echo ' - Guest: 0'
\echo ' - Member: 4 (view only)'
\echo ' - Admin: ~40'
\echo ' - Finance: ~20'
\echo ' - Superadmin: 60 (all)'
\echo ''
\echo 'Next: Restart backend with: pm2 restart membership-backend'
|
||||
91
migrations/006_add_dynamic_roles.sql
Normal file
91
migrations/006_add_dynamic_roles.sql
Normal file
@@ -0,0 +1,91 @@
|
||||
-- Migration 006: Add Dynamic Roles System (Phase 1)
--
-- Adds support for dynamically created roles:
--   1. a 'roles' table for role management
--   2. a nullable 'role_id' column on 'users'
--   3. a nullable 'role_id' column on 'role_permissions'
--
-- IMPORTANT: Phase 1 only. The legacy 'role' enum columns stay in place for
-- backward compatibility and are scheduled for removal in Phase 4, after the
-- data migration has been verified.

-- ------------------------------------------------------------
-- Step 1: the roles table itself
-- ------------------------------------------------------------

BEGIN;

-- One row per role; is_system_role distinguishes built-in roles from
-- admin-created ones. Deleting the creating user only nulls created_by.
CREATE TABLE IF NOT EXISTS roles (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    code VARCHAR NOT NULL UNIQUE,
    name VARCHAR NOT NULL,
    description TEXT,
    is_system_role BOOLEAN NOT NULL DEFAULT FALSE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    created_by UUID REFERENCES users(id) ON DELETE SET NULL
);

CREATE INDEX IF NOT EXISTS idx_roles_code ON roles(code);
CREATE INDEX IF NOT EXISTS idx_roles_is_system_role ON roles(is_system_role);

COMMIT;

SELECT 'Step 1 completed: roles table created' AS progress;
|
||||
|
||||
-- ------------------------------------------------------------
-- Step 2: link users to dynamic roles
-- ------------------------------------------------------------

BEGIN;

-- Nullable during Phase 1 so existing rows keep working on the legacy enum.
ALTER TABLE users
ADD COLUMN IF NOT EXISTS role_id UUID REFERENCES roles(id) ON DELETE SET NULL;

CREATE INDEX IF NOT EXISTS idx_users_role_id ON users(role_id);

COMMIT;

SELECT 'Step 2 completed: role_id column added to users table' AS progress;

-- ------------------------------------------------------------
-- Step 3: link role_permissions to dynamic roles
-- ------------------------------------------------------------

BEGIN;

-- Nullable during Phase 1; mappings cascade away with their role.
ALTER TABLE role_permissions
ADD COLUMN IF NOT EXISTS role_id UUID REFERENCES roles(id) ON DELETE CASCADE;

CREATE INDEX IF NOT EXISTS idx_role_permissions_role_id ON role_permissions(role_id);

COMMIT;

SELECT 'Step 3 completed: role_id column added to role_permissions table' AS progress;

-- ------------------------------------------------------------
-- Done — summary for the operator
-- ------------------------------------------------------------

SELECT '
Migration 006 completed successfully!

Next steps:
1. Run Phase 2: Create seed script to populate system roles (Superadmin, Finance, Member, Guest)
2. Run Phase 3: Migrate existing data from enum to role_id
3. Run Phase 4: Remove old enum columns (after verifying data migration)

Current status:
- roles table created ✓
- users.role_id added (nullable) ✓
- role_permissions.role_id added (nullable) ✓
- Legacy enum columns retained for backward compatibility ✓
' AS migration_status;
|
||||
44
migrations/009_create_donations.sql
Normal file
44
migrations/009_create_donations.sql
Normal file
@@ -0,0 +1,44 @@
|
||||
-- Migration: Create Donations Table
-- Description: Adds donations table to track both member and public donations
-- Date: 2025-12-17
-- CRITICAL: Fixes data loss issue where standalone donations weren't being saved
--
-- This migration is written to be idempotent (safe to re-run), using the same
-- DO/duplicate_object and IF NOT EXISTS guards as the other migrations in this
-- repo; the original unguarded CREATE statements aborted on a second run.

BEGIN;

-- Create donation type enum (guarded so a re-run does not abort the transaction)
DO $$ BEGIN
    CREATE TYPE donationtype AS ENUM ('member', 'public');
EXCEPTION
    WHEN duplicate_object THEN null;
END $$;

-- Create donation status enum
DO $$ BEGIN
    CREATE TYPE donationstatus AS ENUM ('pending', 'completed', 'failed');
EXCEPTION
    WHEN duplicate_object THEN null;
END $$;

-- Create donations table
CREATE TABLE IF NOT EXISTS donations (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    amount_cents INTEGER NOT NULL,
    donation_type donationtype NOT NULL DEFAULT 'public',
    status donationstatus NOT NULL DEFAULT 'pending',
    user_id UUID REFERENCES users(id),   -- nullable: not every donation belongs to a user account
    donor_email VARCHAR,
    donor_name VARCHAR,
    stripe_checkout_session_id VARCHAR,
    stripe_payment_intent_id VARCHAR,
    payment_method VARCHAR,
    notes TEXT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE
);

-- Create indexes for performance
CREATE INDEX IF NOT EXISTS idx_donation_user ON donations(user_id);
CREATE INDEX IF NOT EXISTS idx_donation_type ON donations(donation_type);
CREATE INDEX IF NOT EXISTS idx_donation_status ON donations(status);
CREATE INDEX IF NOT EXISTS idx_donation_created ON donations(created_at);

-- Add comment
COMMENT ON TABLE donations IS 'Tracks both member and public one-time donations';

COMMIT;

-- Verify migration
SELECT 'Donations table created successfully' as status;
SELECT COUNT(*) as initial_count FROM donations;
|
||||
40
migrations/010_add_rejection_fields.sql
Normal file
40
migrations/010_add_rejection_fields.sql
Normal file
@@ -0,0 +1,40 @@
|
||||
-- Migration: Add Rejection Fields to Users Table
-- Description: Adds rejection tracking fields and rejected status to UserStatus enum
-- Date: 2025-12-18

-- Add 'rejected' to the userstatus enum BEFORE opening the transaction below.
-- ALTER TYPE ... ADD VALUE cannot run inside a transaction block on older
-- PostgreSQL versions, and the new label is unusable until committed; the
-- original placed this inside BEGIN/COMMIT, contradicting the multi-transaction
-- enum pattern used by the earlier migrations in this repo. ADD VALUE
-- IF NOT EXISTS keeps the statement idempotent (same idiom as migration 004).
ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'rejected';

BEGIN;

-- Add rejection tracking fields to users table
ALTER TABLE users
ADD COLUMN IF NOT EXISTS rejection_reason TEXT,
ADD COLUMN IF NOT EXISTS rejected_at TIMESTAMP WITH TIME ZONE,
ADD COLUMN IF NOT EXISTS rejected_by UUID REFERENCES users(id);

-- Add comments for documentation
COMMENT ON COLUMN users.rejection_reason IS 'Reason provided when application was rejected';
COMMENT ON COLUMN users.rejected_at IS 'Timestamp when application was rejected';
COMMENT ON COLUMN users.rejected_by IS 'Admin who rejected the application';

-- Partial index: only rows that were actually rejected, for filtering
CREATE INDEX IF NOT EXISTS idx_users_rejected_at ON users(rejected_at) WHERE rejected_at IS NOT NULL;

COMMIT;

-- Verify migration (safe here: the enum value was committed above)
SELECT 'Rejection fields added successfully' AS status;
SELECT COUNT(*) AS rejected_users_count FROM users WHERE status = 'rejected';
|
||||
153
migrations/011_wordpress_import_enhancements.sql
Normal file
153
migrations/011_wordpress_import_enhancements.sql
Normal file
@@ -0,0 +1,153 @@
|
||||
-- Migration: 011_wordpress_import_enhancements
-- Purpose: Enhance ImportJob and User tables for WordPress CSV import feature
-- Date: 2025-12-24
-- Author: Claude Code

-- ----------------------------------------------------------------------------
-- Part 1: extra ImportJob columns for the WordPress import workflow
-- ----------------------------------------------------------------------------

ALTER TABLE import_jobs
ADD COLUMN IF NOT EXISTS field_mapping JSONB DEFAULT '{}'::jsonb,
ADD COLUMN IF NOT EXISTS wordpress_metadata JSONB DEFAULT '{}'::jsonb,
ADD COLUMN IF NOT EXISTS imported_user_ids JSONB DEFAULT '[]'::jsonb,
ADD COLUMN IF NOT EXISTS rollback_at TIMESTAMP WITH TIME ZONE,
ADD COLUMN IF NOT EXISTS rollback_by UUID REFERENCES users(id);

-- Document each column in the catalog.
COMMENT ON COLUMN import_jobs.field_mapping IS 'Maps CSV columns to database fields: {csv_column: db_field}';
COMMENT ON COLUMN import_jobs.wordpress_metadata IS 'Stores preview data, validation results, and WordPress-specific metadata';
COMMENT ON COLUMN import_jobs.imported_user_ids IS 'Array of user IDs created from this import job (for rollback)';
COMMENT ON COLUMN import_jobs.rollback_at IS 'Timestamp when this import was rolled back';
COMMENT ON COLUMN import_jobs.rollback_by IS 'Admin user who performed the rollback';
|
||||
|
||||
-- ----------------------------------------------------------------------------
-- Part 2: new importjobstatus enum values for the import workflow.
-- Each label is checked against pg_enum first so a re-run is a no-op.
-- ----------------------------------------------------------------------------

DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM pg_enum
        WHERE enumlabel = 'validating'
          AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'importjobstatus')
    ) THEN
        ALTER TYPE importjobstatus ADD VALUE 'validating';
    END IF;

    IF NOT EXISTS (
        SELECT 1 FROM pg_enum
        WHERE enumlabel = 'preview_ready'
          AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'importjobstatus')
    ) THEN
        ALTER TYPE importjobstatus ADD VALUE 'preview_ready';
    END IF;

    IF NOT EXISTS (
        SELECT 1 FROM pg_enum
        WHERE enumlabel = 'rolled_back'
          AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'importjobstatus')
    ) THEN
        ALTER TYPE importjobstatus ADD VALUE 'rolled_back';
    END IF;
END$$;
|
||||
|
||||
-- ----------------------------------------------------------------------------
-- Part 3: track where each user record came from
-- ----------------------------------------------------------------------------

ALTER TABLE users
ADD COLUMN IF NOT EXISTS import_source VARCHAR(50),
ADD COLUMN IF NOT EXISTS import_job_id UUID REFERENCES import_jobs(id),
ADD COLUMN IF NOT EXISTS wordpress_user_id BIGINT,
ADD COLUMN IF NOT EXISTS wordpress_registered_date TIMESTAMP WITH TIME ZONE;

COMMENT ON COLUMN users.import_source IS 'Source of user creation: wordpress, manual, registration, etc.';
COMMENT ON COLUMN users.import_job_id IS 'Reference to import job that created this user (if imported)';
COMMENT ON COLUMN users.wordpress_user_id IS 'Original WordPress user ID for reference';
COMMENT ON COLUMN users.wordpress_registered_date IS 'Original WordPress registration date';

-- ----------------------------------------------------------------------------
-- Part 4: supporting indexes
-- ----------------------------------------------------------------------------

-- Partial index: find all users created by a given import job (rollback path).
CREATE INDEX IF NOT EXISTS idx_users_import_job
ON users(import_job_id)
WHERE import_job_id IS NOT NULL;

-- Partial index: query users by where they were imported from.
CREATE INDEX IF NOT EXISTS idx_users_import_source
ON users(import_source)
WHERE import_source IS NOT NULL;

-- Status index (IF NOT EXISTS: an identical index may already exist from an
-- earlier migration).
CREATE INDEX IF NOT EXISTS idx_import_jobs_status
ON import_jobs(status);
|
||||
|
||||
-- ----------------------------------------------------------------------------
-- Part 5: audit trail for import rollbacks
-- ----------------------------------------------------------------------------

-- Permanent record of every rollback: who did it, when, which users were
-- removed, and why.
CREATE TABLE IF NOT EXISTS import_rollback_audit (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    import_job_id UUID NOT NULL REFERENCES import_jobs(id),
    rolled_back_by UUID NOT NULL REFERENCES users(id),
    rolled_back_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    deleted_user_count INTEGER NOT NULL,
    deleted_user_ids JSONB NOT NULL,
    reason TEXT,
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

-- Look up rollbacks by job, and list the most recent rollbacks first.
CREATE INDEX IF NOT EXISTS idx_rollback_audit_import_job
ON import_rollback_audit(import_job_id);

CREATE INDEX IF NOT EXISTS idx_rollback_audit_rolled_back_at
ON import_rollback_audit(rolled_back_at DESC);

COMMENT ON TABLE import_rollback_audit IS 'Audit trail for import rollback operations';
|
||||
|
||||
-- ============================================================================
|
||||
-- VERIFICATION QUERIES (Run after migration to verify)
|
||||
-- ============================================================================
|
||||
|
||||
-- Verify ImportJob columns exist
|
||||
-- SELECT column_name, data_type
|
||||
-- FROM information_schema.columns
|
||||
-- WHERE table_name = 'import_jobs'
|
||||
-- AND column_name IN ('field_mapping', 'wordpress_metadata', 'imported_user_ids', 'rollback_at', 'rollback_by');
|
||||
|
||||
-- Verify User columns exist
|
||||
-- SELECT column_name, data_type
|
||||
-- FROM information_schema.columns
|
||||
-- WHERE table_name = 'users'
|
||||
-- AND column_name IN ('import_source', 'import_job_id', 'wordpress_user_id', 'wordpress_registered_date');
|
||||
|
||||
-- Verify new enum values exist
|
||||
-- SELECT enumlabel FROM pg_enum WHERE enumtypid = (SELECT oid FROM pg_type WHERE typname = 'importjobstatus') ORDER BY enumlabel;
|
||||
|
||||
-- Verify indexes exist
|
||||
-- SELECT indexname, indexdef FROM pg_indexes WHERE tablename IN ('users', 'import_jobs', 'import_rollback_audit') ORDER BY indexname;
|
||||
|
||||
-- ============================================================================
|
||||
-- ROLLBACK SCRIPT (if needed)
|
||||
-- ============================================================================
|
||||
|
||||
-- WARNING: This will drop all columns and data related to WordPress imports
|
||||
-- USE WITH EXTREME CAUTION
|
||||
|
||||
-- DROP TABLE IF EXISTS import_rollback_audit CASCADE;
|
||||
-- DROP INDEX IF EXISTS idx_users_import_job;
|
||||
-- DROP INDEX IF EXISTS idx_users_import_source;
|
||||
-- DROP INDEX IF EXISTS idx_import_jobs_status;
|
||||
-- ALTER TABLE users DROP COLUMN IF EXISTS import_source;
|
||||
-- ALTER TABLE users DROP COLUMN IF EXISTS import_job_id;
|
||||
-- ALTER TABLE users DROP COLUMN IF EXISTS wordpress_user_id;
|
||||
-- ALTER TABLE users DROP COLUMN IF EXISTS wordpress_registered_date;
|
||||
-- ALTER TABLE import_jobs DROP COLUMN IF EXISTS field_mapping;
|
||||
-- ALTER TABLE import_jobs DROP COLUMN IF EXISTS wordpress_metadata;
|
||||
-- ALTER TABLE import_jobs DROP COLUMN IF EXISTS imported_user_ids;
|
||||
-- ALTER TABLE import_jobs DROP COLUMN IF EXISTS rollback_at;
|
||||
-- ALTER TABLE import_jobs DROP COLUMN IF EXISTS rollback_by;
|
||||
|
||||
-- Note: Cannot easily remove enum values from importjobstatus type without recreating it
|
||||
-- Manual intervention required if rollback of enum values is needed
|
||||
@@ -136,3 +136,211 @@ DROP TABLE IF EXISTS financial_reports;
|
||||
DROP TABLE IF EXISTS bylaws_documents;
|
||||
DROP TABLE IF EXISTS storage_usage;
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Running Phase 1-4.5 Migrations (December 2025)

These migrations add features from client feedback phases 1-4.5:

- "Member Since" field for imported users
- Approval → Validation terminology update
- Terms of Service acceptance tracking
- Reminder email tracking for the admin dashboard
|
||||
### Quick Start

Run all migrations at once:

```bash
cd backend/migrations
psql $DATABASE_URL -f run_all_migrations.sql
```
||||
|
||||
### Individual Migration Files
|
||||
|
||||
The migrations are numbered in the order they should be run:
|
||||
|
||||
1. **001_add_member_since_field.sql** - Adds editable `member_since` field for imported users
|
||||
2. **002_rename_approval_to_validation.sql** - Updates terminology from "approval" to "validation"
|
||||
3. **003_add_tos_acceptance.sql** - Adds Terms of Service acceptance tracking
|
||||
4. **004_add_reminder_tracking_fields.sql** - Adds reminder email tracking for admin dashboard
|
||||
|
||||
### Run Individual Migrations
|
||||
|
||||
```bash
|
||||
cd backend/migrations
|
||||
|
||||
# Run migrations one by one
|
||||
psql $DATABASE_URL -f 001_add_member_since_field.sql
|
||||
psql $DATABASE_URL -f 002_rename_approval_to_validation.sql
|
||||
psql $DATABASE_URL -f 003_add_tos_acceptance.sql
|
||||
psql $DATABASE_URL -f 004_add_reminder_tracking_fields.sql
|
||||
```
|
||||
|
||||
### Using psql Interactive Mode
|
||||
|
||||
```bash
|
||||
# Connect to your database
|
||||
psql $DATABASE_URL
|
||||
|
||||
# Inside psql, run:
|
||||
\i backend/migrations/001_add_member_since_field.sql
|
||||
\i backend/migrations/002_rename_approval_to_validation.sql
|
||||
\i backend/migrations/003_add_tos_acceptance.sql
|
||||
\i backend/migrations/004_add_reminder_tracking_fields.sql
|
||||
```
|
||||
|
||||
### What Each Migration Adds
|
||||
|
||||
**Migration 001 - Member Since Field:**
|
||||
- Adds `member_since` column (nullable timestamp)
|
||||
- Backfills active members with their `created_at` date
|
||||
- Allows admins to edit dates for imported users
|
||||
|
||||
**Migration 002 - Approval → Validation Terminology:**
|
||||
- Updates `pending_approval` → `pending_validation`
|
||||
- Updates `pre_approved` → `pre_validated`
|
||||
- Aligns database with client's terminology requirements
|
||||
|
||||
**Migration 003 - ToS Acceptance:**
|
||||
- Adds `accepts_tos` boolean field (default false)
|
||||
- Adds `tos_accepted_at` timestamp field
|
||||
- Backfills existing users as having accepted ToS
|
||||
|
||||
**Migration 004 - Reminder Tracking:**
|
||||
- Adds 8 fields to track reminder emails:
|
||||
- `email_verification_reminders_sent` + `last_email_verification_reminder_at`
|
||||
- `event_attendance_reminders_sent` + `last_event_attendance_reminder_at`
|
||||
- `payment_reminders_sent` + `last_payment_reminder_at`
|
||||
- `renewal_reminders_sent` + `last_renewal_reminder_at`
|
||||
- Enables admin dashboard to show users needing personal outreach
|
||||
|
||||
### Verification
|
||||
|
||||
After running migrations, verify they completed successfully:
|
||||
|
||||
```sql
|
||||
-- Check if new columns exist
|
||||
SELECT column_name, data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_name = 'users'
|
||||
AND column_name IN (
|
||||
'member_since',
|
||||
'accepts_tos',
|
||||
'tos_accepted_at',
|
||||
'email_verification_reminders_sent',
|
||||
'last_email_verification_reminder_at',
|
||||
'event_attendance_reminders_sent',
|
||||
'last_event_attendance_reminder_at',
|
||||
'payment_reminders_sent',
|
||||
'last_payment_reminder_at',
|
||||
'renewal_reminders_sent',
|
||||
'last_renewal_reminder_at'
|
||||
)
|
||||
ORDER BY column_name;
|
||||
|
||||
-- Check status values were updated
|
||||
SELECT status, COUNT(*)
|
||||
FROM users
|
||||
GROUP BY status;
|
||||
```
|
||||
|
||||
### Rollback Phase 1-4.5 Migrations (If Needed)
|
||||
|
||||
```sql
|
||||
-- Rollback 004: Remove reminder tracking fields
|
||||
ALTER TABLE users
|
||||
DROP COLUMN IF EXISTS email_verification_reminders_sent,
|
||||
DROP COLUMN IF EXISTS last_email_verification_reminder_at,
|
||||
DROP COLUMN IF EXISTS event_attendance_reminders_sent,
|
||||
DROP COLUMN IF EXISTS last_event_attendance_reminder_at,
|
||||
DROP COLUMN IF EXISTS payment_reminders_sent,
|
||||
DROP COLUMN IF EXISTS last_payment_reminder_at,
|
||||
DROP COLUMN IF EXISTS renewal_reminders_sent,
|
||||
DROP COLUMN IF EXISTS last_renewal_reminder_at;
|
||||
|
||||
-- Rollback 003: Remove ToS fields
|
||||
ALTER TABLE users
|
||||
DROP COLUMN IF EXISTS accepts_tos,
|
||||
DROP COLUMN IF EXISTS tos_accepted_at;
|
||||
|
||||
-- Rollback 002: Revert validation to approval
|
||||
UPDATE users SET status = 'pending_approval' WHERE status = 'pending_validation';
|
||||
UPDATE users SET status = 'pre_approved' WHERE status = 'pre_validated';
|
||||
|
||||
-- Rollback 001: Remove member_since field
|
||||
ALTER TABLE users DROP COLUMN IF EXISTS member_since;
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Running Phase RBAC Migration (December 2025)
|
||||
|
||||
This migration adds RBAC permission management, user invitations, and CSV import tracking capabilities.
|
||||
|
||||
### Quick Start
|
||||
|
||||
```bash
|
||||
cd backend/migrations
|
||||
psql $DATABASE_URL -f 005_add_rbac_and_invitations.sql
|
||||
```
|
||||
|
||||
### What This Migration Adds
|
||||
|
||||
**UserRole Enum Update:**
|
||||
- Adds `superadmin` role to UserRole enum
|
||||
|
||||
**New Tables:**
|
||||
1. **permissions** - Granular permission definitions (60+ permissions)
|
||||
2. **role_permissions** - Junction table linking roles to permissions
|
||||
3. **user_invitations** - Email-based invitation tracking with tokens
|
||||
4. **import_jobs** - CSV import job tracking with error logging
|
||||
|
||||
**New Enum Types:**
|
||||
- `invitationstatus` (pending, accepted, expired, revoked)
|
||||
- `importjobstatus` (processing, completed, failed, partial)
|
||||
|
||||
### Verification
|
||||
|
||||
After running the migration, verify it completed successfully:
|
||||
|
||||
```sql
|
||||
-- Check if superadmin role exists
|
||||
SELECT enumlabel FROM pg_enum
|
||||
WHERE enumtypid = 'userrole'::regtype
|
||||
ORDER BY enumlabel;
|
||||
|
||||
-- Check if new tables exist
|
||||
SELECT table_name FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name IN ('permissions', 'role_permissions', 'user_invitations', 'import_jobs')
|
||||
ORDER BY table_name;
|
||||
|
||||
-- Check table structures
|
||||
\d permissions
|
||||
\d role_permissions
|
||||
\d user_invitations
|
||||
\d import_jobs
|
||||
```
|
||||
|
||||
### Next Steps After Migration
|
||||
|
||||
1. **Seed Permissions**: Run `permissions_seed.py` to populate default permissions
|
||||
2. **Upgrade Admin to Superadmin**: Update existing admin users to superadmin role
|
||||
3. **Assign Permissions**: Configure permissions for admin, member, and guest roles
|
||||
|
||||
### Rollback (If Needed)
|
||||
|
||||
```sql
|
||||
-- Remove all RBAC tables and enums
|
||||
DROP TABLE IF EXISTS import_jobs CASCADE;
|
||||
DROP TABLE IF EXISTS user_invitations CASCADE;
|
||||
DROP TABLE IF EXISTS role_permissions CASCADE;
|
||||
DROP TABLE IF EXISTS permissions CASCADE;
|
||||
DROP TYPE IF EXISTS importjobstatus;
|
||||
DROP TYPE IF EXISTS invitationstatus;
|
||||
|
||||
-- Note: Cannot remove 'superadmin' from UserRole enum without recreating
|
||||
-- the entire enum. Only rollback if no users have the superadmin role.
|
||||
```
|
||||
|
||||
|
||||
394
migrations/create_tables_only.sql
Normal file
394
migrations/create_tables_only.sql
Normal file
@@ -0,0 +1,394 @@
|
||||
-- ============================================================================
-- Create Tables Only (ENUM types are assumed to already exist)
-- Use this script when the enums were created but the tables were not.
-- ============================================================================

BEGIN;

-- ============================================================================
-- STEP 1: Core Tables
-- ============================================================================

-- users: central account/member table.
-- NOTE(review): import_job_id here has no FK, while fix_missing_fields.sql
-- adds it with REFERENCES import_jobs(id) — confirm which is intended.
CREATE TABLE IF NOT EXISTS users (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    email VARCHAR(255) UNIQUE NOT NULL,
    password_hash VARCHAR(255) NOT NULL,
    first_name VARCHAR(100) NOT NULL,
    last_name VARCHAR(100) NOT NULL,
    phone VARCHAR(20),
    address TEXT,
    city VARCHAR(100),
    state VARCHAR(2),
    zipcode VARCHAR(10),
    date_of_birth DATE,

    -- Profile details
    profile_image_url TEXT,
    bio TEXT,
    interests TEXT,

    -- Partner information
    partner_first_name VARCHAR(100),
    partner_last_name VARCHAR(100),
    partner_is_member BOOLEAN DEFAULT FALSE,
    partner_plan_to_become_member BOOLEAN DEFAULT FALSE,

    -- Referral
    referred_by_member_name VARCHAR(200),

    -- Newsletter preferences
    newsletter_subscribed BOOLEAN DEFAULT TRUE,
    newsletter_publish_name BOOLEAN DEFAULT FALSE,
    newsletter_publish_photo BOOLEAN DEFAULT FALSE,
    newsletter_publish_birthday BOOLEAN DEFAULT FALSE,
    newsletter_publish_none BOOLEAN DEFAULT FALSE,

    -- Volunteering & scholarship
    volunteer_interests TEXT,
    scholarship_requested BOOLEAN DEFAULT FALSE,

    -- Member directory visibility
    show_in_directory BOOLEAN DEFAULT TRUE,

    -- How the user found us (JSON array of sources)
    lead_sources JSONB DEFAULT '[]'::jsonb,

    -- Status & role (enum types must already exist)
    status userstatus DEFAULT 'pending_email' NOT NULL,
    role userrole DEFAULT 'guest' NOT NULL,
    role_id UUID,

    -- Rejection tracking
    rejection_reason TEXT,
    rejected_at TIMESTAMP WITH TIME ZONE,
    rejected_by UUID REFERENCES users(id),

    -- Membership
    member_since DATE,
    accepts_tos BOOLEAN DEFAULT FALSE,
    tos_accepted_at TIMESTAMP WITH TIME ZONE,

    -- Reminder tracking (introduced by migration 004)
    email_verification_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    last_email_verification_reminder_at TIMESTAMP WITH TIME ZONE,
    event_attendance_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    last_event_attendance_reminder_at TIMESTAMP WITH TIME ZONE,
    payment_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    last_payment_reminder_at TIMESTAMP WITH TIME ZONE,
    renewal_reminders_sent INTEGER DEFAULT 0 NOT NULL,
    last_renewal_reminder_at TIMESTAMP WITH TIME ZONE,

    -- WordPress import tracking
    import_source VARCHAR(50),
    import_job_id UUID,
    wordpress_user_id BIGINT,
    wordpress_registered_date TIMESTAMP WITH TIME ZONE,

    -- Authentication tokens & flags
    email_verified BOOLEAN DEFAULT FALSE,
    email_verification_token VARCHAR(255),
    email_verification_expires TIMESTAMP WITH TIME ZONE,
    password_reset_token VARCHAR(255),
    password_reset_expires TIMESTAMP WITH TIME ZONE,
    force_password_change BOOLEAN DEFAULT FALSE,

    -- Timestamps
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- events: published/unpublished community events.
CREATE TABLE IF NOT EXISTS events (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    title VARCHAR(255) NOT NULL,
    description TEXT,
    location VARCHAR(255),
    start_at TIMESTAMP WITH TIME ZONE NOT NULL,
    end_at TIMESTAMP WITH TIME ZONE NOT NULL,
    capacity INTEGER,
    published BOOLEAN DEFAULT FALSE,
    calendar_uid VARCHAR(255) UNIQUE,
    created_by UUID REFERENCES users(id),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- event_rsvps: one RSVP per (event, user); cascades with both parents.
CREATE TABLE IF NOT EXISTS event_rsvps (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    event_id UUID NOT NULL REFERENCES events(id) ON DELETE CASCADE,
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    rsvp_status rsvpstatus NOT NULL,
    attended BOOLEAN DEFAULT FALSE,
    attended_at TIMESTAMP WITH TIME ZONE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    UNIQUE(event_id, user_id)
);

-- event_galleries: photos attached to an event.
CREATE TABLE IF NOT EXISTS event_galleries (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    event_id UUID NOT NULL REFERENCES events(id) ON DELETE CASCADE,
    image_url TEXT NOT NULL,
    caption TEXT,
    uploaded_by UUID NOT NULL REFERENCES users(id),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- import_jobs: CSV/WordPress import runs, with per-row error log.
CREATE TABLE IF NOT EXISTS import_jobs (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    filename VARCHAR(255) NOT NULL,
    file_key VARCHAR(255),
    total_rows INTEGER NOT NULL,
    processed_rows INTEGER DEFAULT 0,
    successful_rows INTEGER DEFAULT 0,
    failed_rows INTEGER DEFAULT 0,
    status importjobstatus DEFAULT 'processing' NOT NULL,
    errors JSONB DEFAULT '[]'::jsonb,

    -- WordPress import enhancements
    field_mapping JSONB DEFAULT '{}'::jsonb,
    wordpress_metadata JSONB DEFAULT '{}'::jsonb,
    imported_user_ids JSONB DEFAULT '[]'::jsonb,
    rollback_at TIMESTAMP WITH TIME ZONE,
    rollback_by UUID REFERENCES users(id),

    imported_by UUID NOT NULL REFERENCES users(id),
    started_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    completed_at TIMESTAMP WITH TIME ZONE
);

COMMIT;
|
||||
|
||||
-- ============================================================================
-- STEP 2: Subscription & Payment Tables
-- ============================================================================

BEGIN;

-- subscription_plans: purchasable membership tiers (Stripe-backed).
CREATE TABLE IF NOT EXISTS subscription_plans (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name VARCHAR(100) NOT NULL,
    description TEXT,
    price_cents INTEGER NOT NULL,
    billing_cycle VARCHAR(20) NOT NULL,
    stripe_price_id VARCHAR(255),
    custom_cycle_enabled BOOLEAN DEFAULT FALSE,
    minimum_price_cents INTEGER DEFAULT 0,
    allow_donation BOOLEAN DEFAULT FALSE,
    is_active BOOLEAN DEFAULT TRUE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- subscriptions: a user's enrollment in a plan, mirroring Stripe state.
CREATE TABLE IF NOT EXISTS subscriptions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    plan_id UUID NOT NULL REFERENCES subscription_plans(id),
    stripe_subscription_id VARCHAR(255),
    stripe_customer_id VARCHAR(255),
    base_subscription_cents INTEGER NOT NULL,
    donation_cents INTEGER DEFAULT 0,
    status subscriptionstatus DEFAULT 'active' NOT NULL,
    current_period_start TIMESTAMP WITH TIME ZONE,
    current_period_end TIMESTAMP WITH TIME ZONE,
    cancel_at_period_end BOOLEAN DEFAULT FALSE,
    canceled_at TIMESTAMP WITH TIME ZONE,
    manual_payment BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- donations: one-off gifts; user_id is nullable to allow anonymous donors.
CREATE TABLE IF NOT EXISTS donations (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_id UUID REFERENCES users(id),
    amount_cents INTEGER NOT NULL,
    donation_type donationtype NOT NULL,
    status donationstatus DEFAULT 'pending' NOT NULL,
    stripe_payment_intent_id VARCHAR(255),
    donor_name VARCHAR(200),
    donor_email VARCHAR(255),
    message TEXT,
    is_anonymous BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    completed_at TIMESTAMP WITH TIME ZONE
);

COMMIT;
|
||||
|
||||
-- ============================================================================
-- STEP 3: RBAC Tables
-- ============================================================================

BEGIN;

-- permissions: granular permission catalog (codes like 'users.view').
CREATE TABLE IF NOT EXISTS permissions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    code VARCHAR(100) UNIQUE NOT NULL,
    name VARCHAR(200) NOT NULL,
    description TEXT,
    module VARCHAR(50),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- roles: named role definitions; system roles are protected from deletion.
CREATE TABLE IF NOT EXISTS roles (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    code VARCHAR(50) UNIQUE NOT NULL,
    name VARCHAR(100) NOT NULL,
    description TEXT,
    is_system_role BOOLEAN DEFAULT FALSE,
    created_by UUID REFERENCES users(id),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- role_permissions: junction table; carries both a legacy string 'role'
-- column and a role_id FK into roles.
CREATE TABLE IF NOT EXISTS role_permissions (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    role VARCHAR(50),
    role_id UUID REFERENCES roles(id) ON DELETE CASCADE,
    permission_id UUID NOT NULL REFERENCES permissions(id) ON DELETE CASCADE,
    created_by UUID REFERENCES users(id),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- user_invitations: token-based email invitations with expiry tracking.
CREATE TABLE IF NOT EXISTS user_invitations (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    email VARCHAR(255) NOT NULL,
    role userrole NOT NULL,
    token VARCHAR(255) UNIQUE NOT NULL,
    invited_by UUID NOT NULL REFERENCES users(id),
    invited_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    expires_at TIMESTAMP WITH TIME ZONE NOT NULL,
    accepted_by UUID REFERENCES users(id),
    accepted_at TIMESTAMP WITH TIME ZONE,
    status invitationstatus DEFAULT 'pending' NOT NULL
);

COMMIT;
|
||||
|
||||
-- ============================================================================
-- STEP 4: Document Management Tables
-- ============================================================================

BEGIN;

-- newsletter_archives: published newsletter documents.
CREATE TABLE IF NOT EXISTS newsletter_archives (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    title VARCHAR(255) NOT NULL,
    description TEXT,
    published_date TIMESTAMP WITH TIME ZONE NOT NULL,
    document_url TEXT NOT NULL,
    document_type VARCHAR(50) NOT NULL,
    created_by UUID NOT NULL REFERENCES users(id),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- financial_reports: yearly financial report documents.
CREATE TABLE IF NOT EXISTS financial_reports (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    year INTEGER NOT NULL,
    title VARCHAR(255) NOT NULL,
    document_url TEXT NOT NULL,
    document_type VARCHAR(50) NOT NULL,
    created_by UUID NOT NULL REFERENCES users(id),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- bylaws_documents: versioned bylaws; is_current marks the active version.
CREATE TABLE IF NOT EXISTS bylaws_documents (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    title VARCHAR(255) NOT NULL,
    version VARCHAR(50) NOT NULL,
    effective_date TIMESTAMP WITH TIME ZONE NOT NULL,
    document_url TEXT NOT NULL,
    document_type VARCHAR(50) NOT NULL,
    is_current BOOLEAN DEFAULT FALSE,
    created_by UUID NOT NULL REFERENCES users(id),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

COMMIT;
|
||||
|
||||
-- ============================================================================
-- STEP 5: System Tables
-- ============================================================================

BEGIN;

-- storage_usage: single-row bookkeeping of total file-storage consumption.
CREATE TABLE IF NOT EXISTS storage_usage (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    total_bytes_used BIGINT DEFAULT 0,
    max_bytes_allowed BIGINT,
    last_calculated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- import_rollback_audit: permanent record of import rollbacks
-- (who, when, how many users, which user ids).
CREATE TABLE IF NOT EXISTS import_rollback_audit (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    import_job_id UUID NOT NULL REFERENCES import_jobs(id),
    rolled_back_by UUID NOT NULL REFERENCES users(id),
    rolled_back_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    deleted_user_count INTEGER NOT NULL,
    deleted_user_ids JSONB NOT NULL,
    reason TEXT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- Seed storage_usage with its single default row (100 GB cap).
INSERT INTO storage_usage (id, total_bytes_used, max_bytes_allowed)
VALUES (gen_random_uuid(), 0, 107374182400)
ON CONFLICT DO NOTHING;

COMMIT;
|
||||
|
||||
-- ============================================================================
-- STEP 6: Create Indexes
-- ============================================================================

BEGIN;

-- users
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
CREATE INDEX IF NOT EXISTS idx_users_status ON users(status);
CREATE INDEX IF NOT EXISTS idx_users_role ON users(role);
CREATE INDEX IF NOT EXISTS idx_users_created_at ON users(created_at);

-- events
CREATE INDEX IF NOT EXISTS idx_events_start_at ON events(start_at);
CREATE INDEX IF NOT EXISTS idx_events_published ON events(published);
CREATE INDEX IF NOT EXISTS idx_events_created_by ON events(created_by);

-- event_rsvps
CREATE INDEX IF NOT EXISTS idx_event_rsvps_event_id ON event_rsvps(event_id);
CREATE INDEX IF NOT EXISTS idx_event_rsvps_user_id ON event_rsvps(user_id);
CREATE INDEX IF NOT EXISTS idx_event_rsvps_attended ON event_rsvps(attended);

-- subscriptions
CREATE INDEX IF NOT EXISTS idx_subscriptions_user_id ON subscriptions(user_id);
CREATE INDEX IF NOT EXISTS idx_subscriptions_status ON subscriptions(status);
CREATE INDEX IF NOT EXISTS idx_subscriptions_stripe_customer_id ON subscriptions(stripe_customer_id);

-- permissions
CREATE INDEX IF NOT EXISTS ix_permissions_code ON permissions(code);
CREATE INDEX IF NOT EXISTS ix_permissions_module ON permissions(module);

-- roles
CREATE INDEX IF NOT EXISTS ix_roles_code ON roles(code);

-- role_permissions
CREATE INDEX IF NOT EXISTS ix_role_permissions_role ON role_permissions(role);
CREATE INDEX IF NOT EXISTS ix_role_permissions_role_id ON role_permissions(role_id);

-- user_invitations
CREATE INDEX IF NOT EXISTS ix_user_invitations_email ON user_invitations(email);
CREATE INDEX IF NOT EXISTS ix_user_invitations_token ON user_invitations(token);

COMMIT;

\echo '✅ All tables created successfully!'
\echo 'Run: psql ... -c "\dt" to verify'
|
||||
80
migrations/diagnose_database.sql
Normal file
80
migrations/diagnose_database.sql
Normal file
@@ -0,0 +1,80 @@
|
||||
-- ============================================================================
-- Database Diagnostic Script
-- Read-only report of which enums, tables, and columns currently exist.
-- ============================================================================

\echo '=== CHECKING ENUMS ==='
SELECT
    t.typname AS enum_name,
    string_agg(e.enumlabel, ', ' ORDER BY e.enumsortorder) AS values
FROM pg_type t
JOIN pg_enum e ON t.oid = e.enumtypid
WHERE t.typname IN (
    'userstatus', 'userrole', 'rsvpstatus', 'subscriptionstatus',
    'donationtype', 'donationstatus', 'invitationstatus', 'importjobstatus'
)
GROUP BY t.typname
ORDER BY t.typname;

\echo ''
\echo '=== CHECKING TABLES ==='
SELECT
    schemaname,
    tablename
FROM pg_tables
WHERE schemaname = 'public'
ORDER BY tablename;

\echo ''
\echo '=== CHECKING USERS TABLE STRUCTURE ==='
SELECT
    column_name,
    data_type,
    is_nullable,
    column_default
FROM information_schema.columns
WHERE table_name = 'users'
ORDER BY ordinal_position;

\echo ''
\echo '=== CHECKING FOR CRITICAL FIELDS ==='
\echo 'Checking if reminder tracking fields exist...'
SELECT EXISTS (
    SELECT 1
    FROM information_schema.columns
    WHERE table_name = 'users'
    AND column_name = 'email_verification_reminders_sent'
) AS has_reminder_fields;

\echo ''
\echo 'Checking if accepts_tos field exists (should be accepts_tos, not tos_accepted)...'
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'users'
AND column_name IN ('accepts_tos', 'tos_accepted');

\echo ''
\echo 'Checking if WordPress import fields exist...'
SELECT EXISTS (
    SELECT 1
    FROM information_schema.columns
    WHERE table_name = 'users'
    AND column_name = 'import_source'
) AS has_import_fields;

\echo ''
\echo '=== CHECKING IMPORT_JOBS TABLE ==='
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'import_jobs'
ORDER BY ordinal_position;

\echo ''
\echo '=== SUMMARY ==='
SELECT
    (SELECT COUNT(*) FROM pg_type WHERE typname IN (
        'userstatus', 'userrole', 'rsvpstatus', 'subscriptionstatus',
        'donationtype', 'donationstatus', 'invitationstatus', 'importjobstatus'
    )) AS enum_count,
    (SELECT COUNT(*) FROM pg_tables WHERE schemaname = 'public') AS table_count,
    (SELECT COUNT(*) FROM information_schema.columns WHERE table_name = 'users') AS users_column_count;
|
||||
169
migrations/fix_missing_fields.sql
Normal file
169
migrations/fix_missing_fields.sql
Normal file
@@ -0,0 +1,169 @@
|
||||
-- ============================================================================
-- Fix Missing Fields Script
-- Safely adds missing columns and enum values without recreating existing
-- structures. Idempotent: every step checks for existence first.
--
-- FIX: the original wrapped ALTER TYPE ... ADD VALUE inside the script's
-- BEGIN/COMMIT. PostgreSQL < 12 rejects ADD VALUE inside a transaction
-- block, and on 12+ the new value is unusable until the transaction
-- commits. The column fixes now commit first, and enum additions run
-- afterwards as plain autocommitted statements.
-- ============================================================================

BEGIN;

\echo '=== FIXING USERS TABLE ==='

-- Fix TOS field name if needed (tos_accepted -> accepts_tos).
-- Only renames when the old column exists AND the new one does not,
-- so a half-migrated database is never clobbered.
DO $$
BEGIN
    IF EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'users' AND column_name = 'tos_accepted'
    ) AND NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'users' AND column_name = 'accepts_tos'
    ) THEN
        ALTER TABLE users RENAME COLUMN tos_accepted TO accepts_tos;
        RAISE NOTICE 'Renamed tos_accepted to accepts_tos';
    END IF;
END $$;

-- Add reminder tracking fields if missing (migration 004 fields).
-- The first column is used as the sentinel for the whole group.
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'users' AND column_name = 'email_verification_reminders_sent'
    ) THEN
        ALTER TABLE users
            ADD COLUMN email_verification_reminders_sent INTEGER DEFAULT 0 NOT NULL,
            ADD COLUMN last_email_verification_reminder_at TIMESTAMP WITH TIME ZONE,
            ADD COLUMN event_attendance_reminders_sent INTEGER DEFAULT 0 NOT NULL,
            ADD COLUMN last_event_attendance_reminder_at TIMESTAMP WITH TIME ZONE,
            ADD COLUMN payment_reminders_sent INTEGER DEFAULT 0 NOT NULL,
            ADD COLUMN last_payment_reminder_at TIMESTAMP WITH TIME ZONE,
            ADD COLUMN renewal_reminders_sent INTEGER DEFAULT 0 NOT NULL,
            ADD COLUMN last_renewal_reminder_at TIMESTAMP WITH TIME ZONE;
        RAISE NOTICE 'Added reminder tracking fields';
    END IF;
END $$;

-- Add WordPress import fields if missing.
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'users' AND column_name = 'import_source'
    ) THEN
        ALTER TABLE users
            ADD COLUMN import_source VARCHAR(50),
            ADD COLUMN import_job_id UUID REFERENCES import_jobs(id),
            ADD COLUMN wordpress_user_id BIGINT,
            ADD COLUMN wordpress_registered_date TIMESTAMP WITH TIME ZONE;
        RAISE NOTICE 'Added WordPress import tracking fields';
    END IF;
END $$;

\echo '=== FIXING IMPORT_JOBS TABLE ==='

-- Add WordPress import enhancement fields if missing.
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'import_jobs' AND column_name = 'field_mapping'
    ) THEN
        ALTER TABLE import_jobs
            ADD COLUMN field_mapping JSONB DEFAULT '{}'::jsonb,
            ADD COLUMN wordpress_metadata JSONB DEFAULT '{}'::jsonb,
            ADD COLUMN imported_user_ids JSONB DEFAULT '[]'::jsonb,
            ADD COLUMN rollback_at TIMESTAMP WITH TIME ZONE,
            ADD COLUMN rollback_by UUID REFERENCES users(id);
        RAISE NOTICE 'Added WordPress import enhancement fields to import_jobs';
    END IF;
END $$;

COMMIT;

-- ============================================================================
-- Enum value additions.
-- Must run OUTSIDE a transaction block: ALTER TYPE ... ADD VALUE is
-- forbidden in transactions before PostgreSQL 12, and on 12+ the value
-- cannot be used until the transaction commits. Each statement below
-- autocommits, and IF NOT EXISTS makes each one idempotent on its own
-- (no pg_enum pre-check needed).
-- ============================================================================

\echo '=== FIXING ENUM VALUES ==='

-- importjobstatus: validating, preview_ready, rolled_back
ALTER TYPE importjobstatus ADD VALUE IF NOT EXISTS 'validating';
ALTER TYPE importjobstatus ADD VALUE IF NOT EXISTS 'preview_ready';
ALTER TYPE importjobstatus ADD VALUE IF NOT EXISTS 'rolled_back';

-- userstatus: pending_validation, pre_validated, canceled, expired, abandoned
ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'pending_validation';
ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'pre_validated';
ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'canceled';
ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'expired';
ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'abandoned';

\echo ''
\echo '=== VERIFICATION ==='
SELECT
    (SELECT COUNT(*) FROM information_schema.columns WHERE table_name = 'users') AS users_columns,
    (SELECT COUNT(*) FROM information_schema.columns WHERE table_name = 'import_jobs') AS import_jobs_columns,
    (SELECT COUNT(*) FROM pg_enum e JOIN pg_type t ON e.enumtypid = t.oid WHERE t.typname = 'userstatus') AS userstatus_values,
    (SELECT COUNT(*) FROM pg_enum e JOIN pg_type t ON e.enumtypid = t.oid WHERE t.typname = 'importjobstatus') AS importjobstatus_values;

\echo ''
\echo '✅ Missing fields have been added!'
\echo 'You can now run: alembic stamp head'
|
||||
238
migrations/seed_data.sql
Normal file
238
migrations/seed_data.sql
Normal file
@@ -0,0 +1,238 @@
|
||||
-- ============================================================================
|
||||
-- Seed Data for LOAF Membership Platform
|
||||
-- Run this after creating the database schema
|
||||
-- ============================================================================
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ----------------------------------------------------------------------------
-- STEP 1: Default roles
-- Idempotent: `code` is unique, so re-running skips rows that already exist.
-- ----------------------------------------------------------------------------

INSERT INTO roles (id, code, name, description, is_system_role, created_at, updated_at)
VALUES
    (gen_random_uuid(), 'guest',      'Guest',       'Default role for new registrations',           true, NOW(), NOW()),
    (gen_random_uuid(), 'member',     'Member',      'Active paying members with full access',       true, NOW(), NOW()),
    (gen_random_uuid(), 'admin',      'Admin',       'Board members with management access',         true, NOW(), NOW()),
    (gen_random_uuid(), 'finance',    'Finance',     'Treasurer role with financial access',         true, NOW(), NOW()),
    (gen_random_uuid(), 'superadmin', 'Super Admin', 'Full system access',                           true, NOW(), NOW())
ON CONFLICT (code) DO NOTHING;
|
||||
|
||||
-- ----------------------------------------------------------------------------
-- STEP 2: Permission catalogue (grouped by module).
-- Idempotent: `code` is unique, so re-running skips existing rows.
-- ----------------------------------------------------------------------------

INSERT INTO permissions (id, code, name, description, module, created_at)
VALUES
    -- users module
    (gen_random_uuid(), 'users.view', 'View Users', 'View user list and profiles', 'users', NOW()),
    (gen_random_uuid(), 'users.create', 'Create Users', 'Create new users', 'users', NOW()),
    (gen_random_uuid(), 'users.edit', 'Edit Users', 'Edit user information', 'users', NOW()),
    (gen_random_uuid(), 'users.delete', 'Delete Users', 'Delete users', 'users', NOW()),
    (gen_random_uuid(), 'users.approve', 'Approve Users', 'Approve pending memberships', 'users', NOW()),
    (gen_random_uuid(), 'users.import', 'Import Users', 'Import users from CSV/external sources', 'users', NOW()),

    -- events module
    (gen_random_uuid(), 'events.view', 'View Events', 'View event list and details', 'events', NOW()),
    (gen_random_uuid(), 'events.create', 'Create Events', 'Create new events', 'events', NOW()),
    (gen_random_uuid(), 'events.edit', 'Edit Events', 'Edit event information', 'events', NOW()),
    (gen_random_uuid(), 'events.delete', 'Delete Events', 'Delete events', 'events', NOW()),
    (gen_random_uuid(), 'events.publish', 'Publish Events', 'Publish/unpublish events', 'events', NOW()),
    (gen_random_uuid(), 'events.manage_attendance', 'Manage Attendance', 'Mark event attendance', 'events', NOW()),

    -- finance module
    (gen_random_uuid(), 'finance.view', 'View Financial Data', 'View subscriptions and payments', 'finance', NOW()),
    (gen_random_uuid(), 'finance.manage_plans', 'Manage Subscription Plans', 'Create/edit subscription plans', 'finance', NOW()),
    (gen_random_uuid(), 'finance.manage_subscriptions', 'Manage Subscriptions', 'Manage user subscriptions', 'finance', NOW()),
    (gen_random_uuid(), 'finance.view_reports', 'View Financial Reports', 'Access financial reports', 'finance', NOW()),
    (gen_random_uuid(), 'finance.export', 'Export Financial Data', 'Export financial data', 'finance', NOW()),

    -- content module
    (gen_random_uuid(), 'content.newsletters', 'Manage Newsletters', 'Manage newsletter archives', 'content', NOW()),
    (gen_random_uuid(), 'content.documents', 'Manage Documents', 'Manage bylaws and documents', 'content', NOW()),
    (gen_random_uuid(), 'content.gallery', 'Manage Gallery', 'Manage event galleries', 'content', NOW()),

    -- system module
    (gen_random_uuid(), 'system.settings', 'System Settings', 'Manage system settings', 'system', NOW()),
    (gen_random_uuid(), 'system.roles', 'Manage Roles', 'Create/edit roles and permissions', 'system', NOW()),
    (gen_random_uuid(), 'system.invitations', 'Manage Invitations', 'Send admin invitations', 'system', NOW()),
    (gen_random_uuid(), 'system.storage', 'Manage Storage', 'View storage usage', 'system', NOW()),
    (gen_random_uuid(), 'system.audit', 'View Audit Logs', 'View system audit logs', 'system', NOW())
ON CONFLICT (code) DO NOTHING;
|
||||
|
||||
-- ----------------------------------------------------------------------------
-- STEP 3: Map permissions onto roles.
-- The bare ON CONFLICT DO NOTHING relies on the unique index over
-- (role, permission_id) in role_permissions, so re-running is a no-op.
-- Guest gets no rows: it only sees public pages.
-- ----------------------------------------------------------------------------

-- member: can see events, nothing else.
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'member',
    (SELECT id FROM roles WHERE code = 'member'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    'events.view'
)
ON CONFLICT DO NOTHING;

-- admin: user, event and content management; limited system access; no finance.
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'admin',
    (SELECT id FROM roles WHERE code = 'admin'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    'users.view', 'users.create', 'users.edit', 'users.approve', 'users.import',
    'events.view', 'events.create', 'events.edit', 'events.delete', 'events.publish', 'events.manage_attendance',
    'content.newsletters', 'content.documents', 'content.gallery',
    'system.invitations', 'system.storage'
)
ON CONFLICT DO NOTHING;

-- finance: full financial toolset plus read access to users and events.
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'finance',
    (SELECT id FROM roles WHERE code = 'finance'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    'finance.view', 'finance.manage_plans', 'finance.manage_subscriptions', 'finance.view_reports', 'finance.export',
    'users.view', 'events.view'
)
ON CONFLICT DO NOTHING;

-- superadmin: every permission in the catalogue (no filter).
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'superadmin',
    (SELECT id FROM roles WHERE code = 'superadmin'),
    p.id,
    NOW()
FROM permissions p
ON CONFLICT DO NOTHING;
|
||||
|
||||
-- ----------------------------------------------------------------------------
-- STEP 4: Subscription plans.
--
-- FIX: the previous version used plain `INSERT ... ON CONFLICT DO NOTHING`,
-- but the only generated value that could conflict is the fresh random UUID
-- primary key, and no unique constraint on `name` is declared here — so the
-- conflict clause never fired and every re-run of this seed script inserted
-- duplicate plans. Each insert is now guarded by a NOT EXISTS check on the
-- plan name, which makes the script idempotent regardless of constraints.
-- (ON CONFLICT DO NOTHING is kept as a belt-and-braces guard in case a
-- unique constraint on name does exist in the live schema.)
-- ----------------------------------------------------------------------------

-- Annual individual: fixed $60.00/year.
INSERT INTO subscription_plans (id, name, description, price_cents, billing_cycle, custom_cycle_enabled, minimum_price_cents, allow_donation, is_active, created_at, updated_at)
SELECT
    gen_random_uuid(),
    'Annual Individual Membership',
    'Standard annual membership for one person. Includes access to all LOAF events, member directory, and exclusive content.',
    6000,      -- $60.00
    'annual',
    false,     -- fixed price
    6000,
    false,
    true,
    NOW(),
    NOW()
WHERE NOT EXISTS (
    SELECT 1 FROM subscription_plans WHERE name = 'Annual Individual Membership'
)
ON CONFLICT DO NOTHING;

-- Annual group (two people, same address): fixed $100.00/year.
INSERT INTO subscription_plans (id, name, description, price_cents, billing_cycle, custom_cycle_enabled, minimum_price_cents, allow_donation, is_active, created_at, updated_at)
SELECT
    gen_random_uuid(),
    'Annual Group Membership',
    'Annual membership for two people living at the same address. Both members receive full access to all LOAF benefits.',
    10000,     -- $100.00
    'annual',
    false,
    10000,
    false,
    true,
    NOW(),
    NOW()
WHERE NOT EXISTS (
    SELECT 1 FROM subscription_plans WHERE name = 'Annual Group Membership'
)
ON CONFLICT DO NOTHING;

-- Pay-what-you-want: $30.00 minimum, surplus treated as a donation.
INSERT INTO subscription_plans (id, name, description, price_cents, billing_cycle, custom_cycle_enabled, minimum_price_cents, allow_donation, is_active, created_at, updated_at)
SELECT
    gen_random_uuid(),
    'Pay What You Want Membership',
    'Choose your own annual membership amount. Minimum $30. Additional contributions help support our scholarship fund.',
    3000,      -- $30.00 minimum
    'annual',
    true,      -- member picks the amount
    3000,
    true,      -- amount above minimum counts as donation
    true,
    NOW(),
    NOW()
WHERE NOT EXISTS (
    SELECT 1 FROM subscription_plans WHERE name = 'Pay What You Want Membership'
)
ON CONFLICT DO NOTHING;
|
||||
|
||||
-- ----------------------------------------------------------------------------
-- STEP 5: Initialise the storage-usage singleton row (100 GB limit).
--
-- FIX: the previous `ON CONFLICT DO NOTHING` could never fire (the only
-- generated conflict candidate is a fresh random UUID), so each re-run of
-- the seed script appended another row. Insert only when the table is empty.
--
-- NOTE(review): the column list here (last_calculated_at, created_at,
-- updated_at) does not match the StorageUsage model in models.py, which
-- declares `last_updated` — verify against the actual database schema.
-- ----------------------------------------------------------------------------

INSERT INTO storage_usage (id, total_bytes_used, max_bytes_allowed, last_calculated_at, created_at, updated_at)
SELECT gen_random_uuid(), 0, 107374182400, NOW(), NOW(), NOW()  -- 100 GB
WHERE NOT EXISTS (SELECT 1 FROM storage_usage);
|
||||
|
||||
COMMIT;
|
||||
|
||||
-- ============================================================================
|
||||
-- Success Message
|
||||
-- ============================================================================
|
||||
|
||||
\echo '✅ Seed data created successfully!'
|
||||
\echo ''
|
||||
\echo 'Created:'
|
||||
\echo ' - 5 default roles (guest, member, admin, finance, superadmin)'
|
||||
\echo ' - 25 permissions across 5 modules'
|
||||
\echo ' - Role-permission mappings'
|
||||
\echo ' - 3 subscription plans'
|
||||
\echo ' - Storage usage initialization'
|
||||
\echo ''
|
||||
\echo 'Next steps:'
|
||||
\echo ' 1. Create superadmin user (see instructions below)'
|
||||
\echo ' 2. Configure Stripe price IDs in subscription_plans'
|
||||
\echo ' 3. Start the application'
|
||||
\echo ''
|
||||
\echo '━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━'
|
||||
\echo 'CREATE SUPERADMIN USER:'
|
||||
\echo '━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━'
|
||||
\echo ''
|
||||
\echo 'Generate password hash in Python:'
|
||||
\echo ' python3 -c "import bcrypt; print(bcrypt.hashpw(b\"your-password\", bcrypt.gensalt()).decode())"'
|
||||
\echo ''
|
||||
\echo 'Then run:'
|
||||
\echo ' psql -U postgres -d loaf_new'
|
||||
\echo ''
|
||||
\echo 'INSERT INTO users ('
|
||||
\echo ' id, email, password_hash, first_name, last_name,'
|
||||
\echo ' status, role, email_verified, created_at, updated_at'
|
||||
\echo ') VALUES ('
|
||||
\echo ' gen_random_uuid(),'
|
||||
\echo ' '\''admin@loafmembers.org'\'','
|
||||
\echo ' '\''$2b$12$YOUR_BCRYPT_HASH_HERE'\'','
|
||||
\echo ' '\''Admin'\'','
|
||||
\echo ' '\''User'\'','
|
||||
\echo ' '\''active'\'','
|
||||
\echo ' '\''superadmin'\'','
|
||||
\echo ' true,'
|
||||
\echo ' NOW(),'
|
||||
\echo ' NOW()'
|
||||
\echo ');'
|
||||
\echo ''
|
||||
361
models.py
361
models.py
@@ -1,4 +1,4 @@
|
||||
from sqlalchemy import Column, String, Boolean, DateTime, Enum as SQLEnum, Text, Integer, BigInteger, ForeignKey, JSON
|
||||
from sqlalchemy import Column, String, Boolean, DateTime, Enum as SQLEnum, Text, Integer, BigInteger, ForeignKey, JSON, Index
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
from sqlalchemy.orm import relationship
|
||||
from datetime import datetime, timezone
|
||||
@@ -8,16 +8,22 @@ from database import Base
|
||||
|
||||
class UserStatus(enum.Enum):
    """Lifecycle states of a user account, from registration through exit."""

    # Registration pipeline
    pending_email = "pending_email"
    pending_approval = "pending_approval"
    pre_approved = "pre_approved"
    pending_validation = "pending_validation"
    pre_validated = "pre_validated"
    payment_pending = "payment_pending"

    # Active / dormant membership
    active = "active"
    inactive = "inactive"

    # Terminal states
    canceled = "canceled"      # membership canceled by the user or an admin
    expired = "expired"        # subscription lapsed without renewal
    abandoned = "abandoned"    # registration never completed (no verification/event/payment)
    rejected = "rejected"      # application turned down by an admin
|
||||
|
||||
class UserRole(enum.Enum):
    """Legacy fixed role set (superseded by the dynamic Role table, but
    still stored on users/invitations for backward compatibility)."""

    guest = "guest"
    member = "member"
    admin = "admin"
    superadmin = "superadmin"
    finance = "finance"
|
||||
|
||||
class RSVPStatus(enum.Enum):
|
||||
yes = "yes"
|
||||
@@ -29,6 +35,22 @@ class SubscriptionStatus(enum.Enum):
|
||||
expired = "expired"
|
||||
cancelled = "cancelled"
|
||||
|
||||
class DonationType(enum.Enum):
    """Who a donation came from: a logged-in member or the public form."""

    member = "member"
    public = "public"
|
||||
|
||||
class DonationStatus(enum.Enum):
    """Payment state of a donation."""

    pending = "pending"
    completed = "completed"
    failed = "failed"
|
||||
|
||||
|
||||
class PaymentMethodType(enum.Enum):
    """How a payment method settles: card via Stripe, or an offline record."""

    card = "card"
    cash = "cash"
    bank_transfer = "bank_transfer"
    check = "check"
|
||||
|
||||
class User(Base):
|
||||
__tablename__ = "users"
|
||||
|
||||
@@ -50,9 +72,11 @@ class User(Base):
|
||||
partner_plan_to_become_member = Column(Boolean, default=False)
|
||||
referred_by_member_name = Column(String, nullable=True)
|
||||
status = Column(SQLEnum(UserStatus), default=UserStatus.pending_email, nullable=False)
|
||||
role = Column(SQLEnum(UserRole), default=UserRole.guest, nullable=False)
|
||||
role = Column(SQLEnum(UserRole), default=UserRole.guest, nullable=False) # Legacy enum, kept for backward compatibility
|
||||
role_id = Column(UUID(as_uuid=True), ForeignKey("roles.id"), nullable=True) # New dynamic role FK
|
||||
email_verified = Column(Boolean, default=False)
|
||||
email_verification_token = Column(String, nullable=True)
|
||||
email_verification_expires = Column(DateTime, nullable=True)
|
||||
newsletter_subscribed = Column(Boolean, default=False)
|
||||
|
||||
# Newsletter Publication Preferences (Step 2)
|
||||
@@ -89,13 +113,103 @@ class User(Base):
|
||||
social_media_twitter = Column(String, nullable=True)
|
||||
social_media_linkedin = Column(String, nullable=True)
|
||||
|
||||
# Terms of Service Acceptance (Step 4)
|
||||
accepts_tos = Column(Boolean, default=False, nullable=False)
|
||||
tos_accepted_at = Column(DateTime, nullable=True)
|
||||
|
||||
# Member Since Date - Editable by admins for imported users
|
||||
member_since = Column(DateTime, nullable=True, comment="Date when user became a member - editable by admins for imported users")
|
||||
|
||||
# Reminder Tracking - for admin dashboard visibility
|
||||
email_verification_reminders_sent = Column(Integer, default=0, nullable=False, comment="Count of email verification reminders sent")
|
||||
last_email_verification_reminder_at = Column(DateTime, nullable=True, comment="Timestamp of last verification reminder")
|
||||
|
||||
event_attendance_reminders_sent = Column(Integer, default=0, nullable=False, comment="Count of event attendance reminders sent")
|
||||
last_event_attendance_reminder_at = Column(DateTime, nullable=True, comment="Timestamp of last event attendance reminder")
|
||||
|
||||
payment_reminders_sent = Column(Integer, default=0, nullable=False, comment="Count of payment reminders sent")
|
||||
last_payment_reminder_at = Column(DateTime, nullable=True, comment="Timestamp of last payment reminder")
|
||||
|
||||
renewal_reminders_sent = Column(Integer, default=0, nullable=False, comment="Count of renewal reminders sent")
|
||||
last_renewal_reminder_at = Column(DateTime, nullable=True, comment="Timestamp of last renewal reminder")
|
||||
|
||||
# Rejection Tracking
|
||||
rejection_reason = Column(Text, nullable=True, comment="Reason provided when application was rejected")
|
||||
rejected_at = Column(DateTime(timezone=True), nullable=True, comment="Timestamp when application was rejected")
|
||||
rejected_by = Column(UUID(as_uuid=True), ForeignKey('users.id'), nullable=True, comment="Admin who rejected the application")
|
||||
|
||||
# WordPress Import Tracking
|
||||
import_source = Column(String(50), nullable=True, comment="Source of user creation: wordpress, manual, registration")
|
||||
import_job_id = Column(UUID(as_uuid=True), ForeignKey('import_jobs.id'), nullable=True, comment="Import job that created this user")
|
||||
wordpress_user_id = Column(BigInteger, nullable=True, comment="Original WordPress user ID")
|
||||
wordpress_registered_date = Column(DateTime(timezone=True), nullable=True, comment="Original WordPress registration date")
|
||||
|
||||
# Role Change Audit Trail
|
||||
role_changed_at = Column(DateTime(timezone=True), nullable=True, comment="Timestamp when role was last changed")
|
||||
role_changed_by = Column(UUID(as_uuid=True), ForeignKey('users.id', ondelete='SET NULL'), nullable=True, comment="Admin who changed the role")
|
||||
|
||||
# Stripe Customer ID - Centralized for payment method management
|
||||
stripe_customer_id = Column(String, nullable=True, index=True, comment="Stripe Customer ID for payment method management")
|
||||
|
||||
# Dynamic Registration Form - Custom field responses
|
||||
custom_registration_data = Column(JSON, default=dict, nullable=False,
|
||||
comment="Dynamic registration field responses stored as JSON for custom form fields")
|
||||
|
||||
created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
|
||||
updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
|
||||
|
||||
# Relationships
|
||||
role_obj = relationship("Role", back_populates="users", foreign_keys=[role_id])
|
||||
events_created = relationship("Event", back_populates="creator")
|
||||
rsvps = relationship("EventRSVP", back_populates="user")
|
||||
subscriptions = relationship("Subscription", back_populates="user", foreign_keys="Subscription.user_id")
|
||||
role_changer = relationship("User", foreign_keys=[role_changed_by], remote_side="User.id", post_update=True)
|
||||
payment_methods = relationship("PaymentMethod", back_populates="user", foreign_keys="PaymentMethod.user_id")
|
||||
|
||||
|
||||
class PaymentMethod(Base):
    """A payment method on file for a user.

    Covers both Stripe-managed cards (referenced by ``stripe_payment_method_id``,
    with only display-safe card metadata stored locally) and manually recorded
    offline methods such as cash or check (``is_manual``). Removal is a soft
    delete via ``is_active``.
    """
    __tablename__ = "payment_methods"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)

    # Stripe reference (null for manual records)
    stripe_payment_method_id = Column(String, nullable=True, unique=True, index=True, comment="Stripe pm_xxx reference")

    # Display-only card metadata (no PAN stored — PCI compliant)
    card_brand = Column(String(20), nullable=True, comment="Card brand: visa, mastercard, amex, etc.")
    card_last4 = Column(String(4), nullable=True, comment="Last 4 digits of card")
    card_exp_month = Column(Integer, nullable=True, comment="Card expiration month")
    card_exp_year = Column(Integer, nullable=True, comment="Card expiration year")
    card_funding = Column(String(20), nullable=True, comment="Card funding type: credit, debit, prepaid")

    # How the method settles (card/cash/bank_transfer/check)
    payment_type = Column(SQLEnum(PaymentMethodType), default=PaymentMethodType.card, nullable=False)

    # Flags
    is_default = Column(Boolean, default=False, nullable=False, comment="Whether this is the default payment method for auto-renewals")
    is_active = Column(Boolean, default=True, nullable=False, comment="Soft delete flag - False means removed")
    is_manual = Column(Boolean, default=False, nullable=False, comment="True for manually recorded methods (cash/check)")

    # Free-form admin notes for offline records
    manual_notes = Column(Text, nullable=True, comment="Admin notes for manual payment methods")

    # Audit trail
    created_by = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True, comment="Admin who added this on behalf of user")
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False)
    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)

    # Relationships
    user = relationship("User", back_populates="payment_methods", foreign_keys=[user_id])
    creator = relationship("User", foreign_keys=[created_by])

    # Composite indexes for the common "user's default/active methods" lookups
    __table_args__ = (
        Index('idx_payment_method_user_default', 'user_id', 'is_default'),
        Index('idx_payment_method_active', 'user_id', 'is_active'),
    )
|
||||
|
||||
|
||||
class Event(Base):
|
||||
__tablename__ = "events"
|
||||
@@ -184,6 +298,15 @@ class Subscription(Base):
|
||||
donation_cents = Column(Integer, default=0, nullable=False) # Additional donation amount
|
||||
# Note: amount_paid_cents = base_subscription_cents + donation_cents
|
||||
|
||||
# Stripe transaction metadata (for validation and audit)
|
||||
stripe_payment_intent_id = Column(String, nullable=True, index=True) # Initial payment transaction ID
|
||||
stripe_charge_id = Column(String, nullable=True, index=True) # Actual charge reference
|
||||
stripe_invoice_id = Column(String, nullable=True, index=True) # Invoice reference
|
||||
payment_completed_at = Column(DateTime(timezone=True), nullable=True) # Exact payment timestamp from Stripe
|
||||
card_last4 = Column(String(4), nullable=True) # Last 4 digits of card
|
||||
card_brand = Column(String(20), nullable=True) # Visa, Mastercard, etc.
|
||||
stripe_receipt_url = Column(String, nullable=True) # Link to Stripe receipt
|
||||
|
||||
# Manual payment fields
|
||||
manual_payment = Column(Boolean, default=False, nullable=False) # Whether this was a manual offline payment
|
||||
manual_payment_notes = Column(Text, nullable=True) # Admin notes about the payment
|
||||
@@ -198,6 +321,49 @@ class Subscription(Base):
|
||||
user = relationship("User", back_populates="subscriptions", foreign_keys=[user_id])
|
||||
plan = relationship("SubscriptionPlan", back_populates="subscriptions")
|
||||
|
||||
class Donation(Base):
    """A one-off donation, either from a member (``user_id`` set) or from the
    public donation form (``user_id`` NULL, donor identified by
    ``donor_email``/``donor_name``). Stripe transaction metadata is captured
    for validation and audit.

    Fix vs. previous version: ``updated_at`` now also has a creation default,
    matching every other model in this file — previously it stayed NULL until
    the row's first update.
    """
    __tablename__ = "donations"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)

    # Donation details
    amount_cents = Column(Integer, nullable=False)
    donation_type = Column(SQLEnum(DonationType), nullable=False, default=DonationType.public)
    status = Column(SQLEnum(DonationStatus), nullable=False, default=DonationStatus.pending)

    # Donor information
    user_id = Column(UUID(as_uuid=True), ForeignKey('users.id'), nullable=True)  # NULL for public donations
    donor_email = Column(String, nullable=True)  # for non-members
    donor_name = Column(String, nullable=True)   # for non-members

    # Payment details
    stripe_checkout_session_id = Column(String, nullable=True)
    stripe_payment_intent_id = Column(String, nullable=True, index=True)
    payment_method = Column(String, nullable=True)  # card, bank_transfer, etc.

    # Stripe transaction metadata (for validation and audit)
    stripe_charge_id = Column(String, nullable=True, index=True)      # actual charge reference
    stripe_customer_id = Column(String, nullable=True, index=True)    # customer ID if created
    payment_completed_at = Column(DateTime(timezone=True), nullable=True)  # payment timestamp from Stripe
    card_last4 = Column(String(4), nullable=True)
    card_brand = Column(String(20), nullable=True)
    stripe_receipt_url = Column(String, nullable=True)

    # Metadata
    notes = Column(Text, nullable=True)
    created_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc))
    # FIX: add creation default so updated_at is never NULL (consistent with
    # PaymentMethod/User/Role models above).
    updated_at = Column(DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))

    # Relationship
    user = relationship("User", backref="donations", foreign_keys=[user_id])

    __table_args__ = (
        Index('idx_donation_user', 'user_id'),
        Index('idx_donation_type', 'donation_type'),
        Index('idx_donation_status', 'status'),
        Index('idx_donation_created', 'created_at'),
    )
|
||||
|
||||
class EventGallery(Base):
|
||||
__tablename__ = "event_galleries"
|
||||
|
||||
@@ -271,3 +437,190 @@ class StorageUsage(Base):
|
||||
total_bytes_used = Column(BigInteger, default=0)
|
||||
max_bytes_allowed = Column(BigInteger, nullable=False) # From .env
|
||||
last_updated = Column(DateTime, default=lambda: datetime.now(timezone.utc))
|
||||
|
||||
# ============================================================
|
||||
# RBAC Permission Management Models
|
||||
# ============================================================
|
||||
|
||||
class Permission(Base):
    """One grantable capability in the RBAC system, e.g. ``users.create``.

    Grouped by ``module`` for display; joined to roles through
    :class:`RolePermission`.
    """
    __tablename__ = "permissions"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    code = Column(String, unique=True, nullable=False, index=True)    # e.g. "users.create", "events.edit"
    name = Column(String, nullable=False)                             # e.g. "Create Users", "Edit Events"
    description = Column(Text, nullable=True)
    module = Column(String, nullable=False, index=True)               # e.g. "users", "events", "subscriptions"
    created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))

    # Relationships
    role_permissions = relationship("RolePermission", back_populates="permission", cascade="all, delete-orphan")
|
||||
|
||||
class Role(Base):
    """An admin-manageable role. System roles (``is_system_role``) are the
    built-ins (e.g. superadmin, member, guest) and must not be deleted."""
    __tablename__ = "roles"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    code = Column(String, unique=True, nullable=False, index=True)    # e.g. "superadmin", "finance", "custom_role_1"
    name = Column(String, nullable=False)                             # display name, e.g. "Finance Manager"
    description = Column(Text, nullable=True)
    is_system_role = Column(Boolean, default=False, nullable=False)   # built-in roles are non-deletable
    created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
    updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)

    # Relationships
    users = relationship("User", back_populates="role_obj", foreign_keys="User.role_id")
    role_permissions = relationship("RolePermission", back_populates="role_obj", cascade="all, delete-orphan")
    creator = relationship("User", foreign_keys=[created_by])
|
||||
|
||||
class RolePermission(Base):
    """Junction row granting one :class:`Permission` to one role.

    Carries both the legacy ``role`` enum and the newer ``role_id`` FK during
    the migration to dynamic roles.

    NOTE(review): the unique index covers (legacy ``role``, ``permission_id``),
    not ``role_id`` — confirm this is still the intended uniqueness once
    custom dynamic roles can share a legacy role value.
    """
    __tablename__ = "role_permissions"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    role = Column(SQLEnum(UserRole), nullable=False, index=True)   # legacy enum, kept for backward compatibility
    role_id = Column(UUID(as_uuid=True), ForeignKey("roles.id"), nullable=True, index=True)  # new dynamic role FK
    permission_id = Column(UUID(as_uuid=True), ForeignKey("permissions.id"), nullable=False)
    created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
    created_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)

    # Relationships
    role_obj = relationship("Role", back_populates="role_permissions")
    permission = relationship("Permission", back_populates="role_permissions")
    creator = relationship("User", foreign_keys=[created_by])

    # One grant per (legacy role, permission) pair
    __table_args__ = (
        Index('idx_role_permission', 'role', 'permission_id', unique=True),
    )
|
||||
|
||||
# ============================================================
|
||||
# User Invitation Models
|
||||
# ============================================================
|
||||
|
||||
class InvitationStatus(enum.Enum):
    """Lifecycle of an emailed user invitation."""

    pending = "pending"
    accepted = "accepted"
    expired = "expired"
    revoked = "revoked"
|
||||
|
||||
class UserInvitation(Base):
    """A token-based email invitation, optionally pre-filling profile fields.

    Tracked from send (``invited_by``/``invited_at``) through expiry or
    acceptance (``accepted_at``/``accepted_by``).
    """
    __tablename__ = "user_invitations"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    email = Column(String, nullable=False, index=True)
    token = Column(String, unique=True, nullable=False, index=True)
    role = Column(SQLEnum(UserRole), nullable=False)
    status = Column(SQLEnum(InvitationStatus), default=InvitationStatus.pending, nullable=False)

    # Optional pre-filled profile information
    first_name = Column(String, nullable=True)
    last_name = Column(String, nullable=True)
    phone = Column(String, nullable=True)

    # Invitation lifecycle tracking
    invited_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
    invited_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False)
    expires_at = Column(DateTime, nullable=False)
    accepted_at = Column(DateTime, nullable=True)
    accepted_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)

    # Relationships
    inviter = relationship("User", foreign_keys=[invited_by])
    accepted_user = relationship("User", foreign_keys=[accepted_by])
|
||||
|
||||
# ============================================================
|
||||
# CSV Import/Export Models
|
||||
# ============================================================
|
||||
|
||||
class ImportJobStatus(enum.Enum):
    """States of a CSV import job, including preview and rollback phases."""

    processing = "processing"
    completed = "completed"
    failed = "failed"
    partial = "partial"
    validating = "validating"
    preview_ready = "preview_ready"
    rolled_back = "rolled_back"
|
||||
|
||||
class ImportJob(Base):
    """One CSV user-import run, with per-row progress counters, structured
    error records, and the bookkeeping needed to roll the import back."""
    __tablename__ = "import_jobs"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    filename = Column(String, nullable=False)
    file_key = Column(String, nullable=True)  # R2 object key for the uploaded CSV

    # Progress counters
    total_rows = Column(Integer, nullable=False)
    processed_rows = Column(Integer, default=0, nullable=False)
    successful_rows = Column(Integer, default=0, nullable=False)
    failed_rows = Column(Integer, default=0, nullable=False)
    status = Column(SQLEnum(ImportJobStatus), default=ImportJobStatus.processing, nullable=False)
    # Structured per-row failures, e.g. [{row: 5, field: "email", error: "Invalid format"}]
    errors = Column(JSON, default=list, nullable=False)

    # WordPress import enhancements
    field_mapping = Column(JSON, default=dict, nullable=False)        # CSV column -> DB field mapping
    wordpress_metadata = Column(JSON, default=dict, nullable=False)   # preview data, validation results
    imported_user_ids = Column(JSON, default=list, nullable=False)    # created user IDs, kept for rollback
    rollback_at = Column(DateTime, nullable=True)
    rollback_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)

    # Tracking
    imported_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
    started_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False)
    completed_at = Column(DateTime, nullable=True)

    # Relationships
    importer = relationship("User", foreign_keys=[imported_by])
    rollback_user = relationship("User", foreign_keys=[rollback_by])
|
||||
|
||||
|
||||
class ImportRollbackAudit(Base):
    """Audit trail for import rollback operations.

    One row per rollback of an ImportJob: records who rolled it back, when,
    exactly which user rows were deleted, and an optional free-text reason.
    Kept separate from ImportJob so the audit record survives independently.
    """
    __tablename__ = "import_rollback_audit"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    import_job_id = Column(UUID(as_uuid=True), ForeignKey("import_jobs.id"), nullable=False)
    rolled_back_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
    rolled_back_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False)
    deleted_user_count = Column(Integer, nullable=False)  # denormalized count of deleted_user_ids
    deleted_user_ids = Column(JSON, nullable=False)  # List of deleted user UUIDs
    reason = Column(Text, nullable=True)  # optional admin-supplied justification
    created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False)

    # Relationships
    import_job = relationship("ImportJob")
    admin_user = relationship("User", foreign_keys=[rolled_back_by])
|
||||
|
||||
|
||||
# ============================================================
|
||||
# System Settings Models
|
||||
# ============================================================
|
||||
|
||||
class SettingType(enum.Enum):
    """How a SystemSettings value is stored and should be interpreted."""
    plaintext = "plaintext"  # value stored as-is
    encrypted = "encrypted"  # value encrypted at rest (decrypted via encryption_service)
    json = "json"            # value holds a JSON-encoded structure
|
||||
|
||||
|
||||
class SystemSettings(Base):
    """System-wide configuration settings stored in database.

    Key/value store for runtime configuration that is read dynamically
    instead of from .env (e.g. 'stripe_secret_key' / 'stripe_webhook_secret'
    consumed by the Stripe service).  Secret values are stored encrypted
    (setting_type == SettingType.encrypted) and flagged via ``is_sensitive``.
    """
    __tablename__ = "system_settings"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    setting_key = Column(String(100), unique=True, nullable=False, index=True)  # lookup key, e.g. 'stripe_secret_key'
    setting_value = Column(Text, nullable=True)  # raw or encrypted payload, per setting_type
    setting_type = Column(SQLEnum(SettingType), default=SettingType.plaintext, nullable=False)
    description = Column(Text, nullable=True)
    updated_by = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="SET NULL"), nullable=True)  # SET NULL so settings survive user deletion
    created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), nullable=False)
    updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc), nullable=False)
    is_sensitive = Column(Boolean, default=False, nullable=False)  # hint for admin UIs to mask the value

    # Relationships
    updater = relationship("User", foreign_keys=[updated_by])

    # Index on updated_at for audit queries
    __table_args__ = (
        Index('idx_system_settings_updated_at', 'updated_at'),
    )
|
||||
|
||||
@@ -11,11 +11,9 @@ from datetime import datetime, timezone, timedelta
|
||||
# Load environment variables
|
||||
load_dotenv()
|
||||
|
||||
# Initialize Stripe with secret key
|
||||
stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
|
||||
|
||||
# Stripe webhook secret for signature verification
|
||||
STRIPE_WEBHOOK_SECRET = os.getenv("STRIPE_WEBHOOK_SECRET")
|
||||
# NOTE: Stripe credentials are now database-driven
|
||||
# These .env fallbacks are kept for backward compatibility only
|
||||
# The actual credentials are loaded dynamically from system_settings table
|
||||
|
||||
def create_checkout_session(
|
||||
user_id: str,
|
||||
@@ -23,11 +21,15 @@ def create_checkout_session(
|
||||
plan_id: str,
|
||||
stripe_price_id: str,
|
||||
success_url: str,
|
||||
cancel_url: str
|
||||
cancel_url: str,
|
||||
db = None
|
||||
):
|
||||
"""
|
||||
Create a Stripe Checkout session for subscription payment.
|
||||
|
||||
Args:
|
||||
db: Database session (optional, for reading Stripe credentials from database)
|
||||
|
||||
Args:
|
||||
user_id: User's UUID
|
||||
user_email: User's email address
|
||||
@@ -39,6 +41,28 @@ def create_checkout_session(
|
||||
Returns:
|
||||
dict: Checkout session object with session ID and URL
|
||||
"""
|
||||
# Load Stripe API key from database if available
|
||||
if db:
|
||||
try:
|
||||
# Import here to avoid circular dependency
|
||||
from models import SystemSettings, SettingType
|
||||
from encryption_service import get_encryption_service
|
||||
|
||||
setting = db.query(SystemSettings).filter(
|
||||
SystemSettings.setting_key == 'stripe_secret_key'
|
||||
).first()
|
||||
|
||||
if setting and setting.setting_value:
|
||||
encryption_service = get_encryption_service()
|
||||
stripe.api_key = encryption_service.decrypt(setting.setting_value)
|
||||
except Exception as e:
|
||||
# Fallback to .env if database read fails
|
||||
print(f"Failed to read Stripe key from database: {e}")
|
||||
stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
|
||||
else:
|
||||
# Fallback to .env if no db session
|
||||
stripe.api_key = os.getenv("STRIPE_SECRET_KEY")
|
||||
|
||||
try:
|
||||
# Create Checkout Session
|
||||
checkout_session = stripe.checkout.Session.create(
|
||||
@@ -74,13 +98,14 @@ def create_checkout_session(
|
||||
raise Exception(f"Stripe error: {str(e)}")
|
||||
|
||||
|
||||
def verify_webhook_signature(payload: bytes, sig_header: str) -> dict:
|
||||
def verify_webhook_signature(payload: bytes, sig_header: str, db=None) -> dict:
|
||||
"""
|
||||
Verify Stripe webhook signature and construct event.
|
||||
|
||||
Args:
|
||||
payload: Raw webhook payload bytes
|
||||
sig_header: Stripe signature header
|
||||
db: Database session (optional, for reading webhook secret from database)
|
||||
|
||||
Returns:
|
||||
dict: Verified webhook event
|
||||
@@ -88,9 +113,32 @@ def verify_webhook_signature(payload: bytes, sig_header: str) -> dict:
|
||||
Raises:
|
||||
ValueError: If signature verification fails
|
||||
"""
|
||||
# Load webhook secret from database if available
|
||||
webhook_secret = None
|
||||
if db:
|
||||
try:
|
||||
from models import SystemSettings
|
||||
from encryption_service import get_encryption_service
|
||||
|
||||
setting = db.query(SystemSettings).filter(
|
||||
SystemSettings.setting_key == 'stripe_webhook_secret'
|
||||
).first()
|
||||
|
||||
if setting and setting.setting_value:
|
||||
encryption_service = get_encryption_service()
|
||||
webhook_secret = encryption_service.decrypt(setting.setting_value)
|
||||
except Exception as e:
|
||||
print(f"Failed to read webhook secret from database: {e}")
|
||||
webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
|
||||
else:
|
||||
webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
|
||||
|
||||
if not webhook_secret:
|
||||
raise ValueError("STRIPE_WEBHOOK_SECRET not configured")
|
||||
|
||||
try:
|
||||
event = stripe.Webhook.construct_event(
|
||||
payload, sig_header, STRIPE_WEBHOOK_SECRET
|
||||
payload, sig_header, webhook_secret
|
||||
)
|
||||
return event
|
||||
except ValueError as e:
|
||||
|
||||
615
permissions_seed.py
Normal file
615
permissions_seed.py
Normal file
@@ -0,0 +1,615 @@
|
||||
"""
|
||||
Permission Seeding Script
|
||||
|
||||
This script populates the database with 60+ granular permissions for RBAC.
|
||||
Permissions are organized into 9 modules: users, events, subscriptions,
|
||||
financials, newsletters, bylaws, gallery, settings, and permissions.
|
||||
|
||||
Usage:
|
||||
python permissions_seed.py
|
||||
|
||||
Environment Variables:
|
||||
DATABASE_URL - PostgreSQL connection string
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from database import Base
|
||||
from models import Permission, RolePermission, UserRole
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Load environment variables
|
||||
load_dotenv()
|
||||
|
||||
# Database connection — fail fast with a clear message when DATABASE_URL is
# missing, instead of a later, more cryptic SQLAlchemy error.
DATABASE_URL = os.getenv("DATABASE_URL")
if not DATABASE_URL:
    print("Error: DATABASE_URL environment variable not set")
    sys.exit(1)

# Session factory used by seed_permissions(); explicit commit/rollback.
engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
# ============================================================
|
||||
# Permission Definitions
|
||||
# ============================================================
|
||||
|
||||
PERMISSIONS = [
|
||||
# ========== USERS MODULE ==========
|
||||
{
|
||||
"code": "users.view",
|
||||
"name": "View Users",
|
||||
"description": "View user list and user profiles",
|
||||
"module": "users"
|
||||
},
|
||||
{
|
||||
"code": "users.create",
|
||||
"name": "Create Users",
|
||||
"description": "Create new users and send invitations",
|
||||
"module": "users"
|
||||
},
|
||||
{
|
||||
"code": "users.edit",
|
||||
"name": "Edit Users",
|
||||
"description": "Edit user profiles and information",
|
||||
"module": "users"
|
||||
},
|
||||
{
|
||||
"code": "users.delete",
|
||||
"name": "Delete Users",
|
||||
"description": "Delete user accounts",
|
||||
"module": "users"
|
||||
},
|
||||
{
|
||||
"code": "users.status",
|
||||
"name": "Change User Status",
|
||||
"description": "Change user status (active, inactive, etc.)",
|
||||
"module": "users"
|
||||
},
|
||||
{
|
||||
"code": "users.approve",
|
||||
"name": "Approve/Validate Users",
|
||||
"description": "Approve or validate user applications",
|
||||
"module": "users"
|
||||
},
|
||||
{
|
||||
"code": "users.export",
|
||||
"name": "Export Users",
|
||||
"description": "Export user data to CSV",
|
||||
"module": "users"
|
||||
},
|
||||
{
|
||||
"code": "users.import",
|
||||
"name": "Import Users",
|
||||
"description": "Import users from CSV",
|
||||
"module": "users"
|
||||
},
|
||||
{
|
||||
"code": "users.reset_password",
|
||||
"name": "Reset User Password",
|
||||
"description": "Reset user passwords via email",
|
||||
"module": "users"
|
||||
},
|
||||
{
|
||||
"code": "users.resend_verification",
|
||||
"name": "Resend Verification Email",
|
||||
"description": "Resend email verification links",
|
||||
"module": "users"
|
||||
},
|
||||
|
||||
# ========== EVENTS MODULE ==========
|
||||
{
|
||||
"code": "events.view",
|
||||
"name": "View Events",
|
||||
"description": "View event list and event details",
|
||||
"module": "events"
|
||||
},
|
||||
{
|
||||
"code": "events.create",
|
||||
"name": "Create Events",
|
||||
"description": "Create new events",
|
||||
"module": "events"
|
||||
},
|
||||
{
|
||||
"code": "events.edit",
|
||||
"name": "Edit Events",
|
||||
"description": "Edit existing events",
|
||||
"module": "events"
|
||||
},
|
||||
{
|
||||
"code": "events.delete",
|
||||
"name": "Delete Events",
|
||||
"description": "Delete events",
|
||||
"module": "events"
|
||||
},
|
||||
{
|
||||
"code": "events.publish",
|
||||
"name": "Publish Events",
|
||||
"description": "Publish or unpublish events",
|
||||
"module": "events"
|
||||
},
|
||||
{
|
||||
"code": "events.attendance",
|
||||
"name": "Mark Event Attendance",
|
||||
"description": "Mark user attendance for events",
|
||||
"module": "events"
|
||||
},
|
||||
{
|
||||
"code": "events.rsvps",
|
||||
"name": "View Event RSVPs",
|
||||
"description": "View and manage event RSVPs",
|
||||
"module": "events"
|
||||
},
|
||||
{
|
||||
"code": "events.calendar_export",
|
||||
"name": "Export Event Calendar",
|
||||
"description": "Export events to iCal format",
|
||||
"module": "events"
|
||||
},
|
||||
|
||||
# ========== SUBSCRIPTIONS MODULE ==========
|
||||
{
|
||||
"code": "subscriptions.view",
|
||||
"name": "View Subscriptions",
|
||||
"description": "View subscription list and details",
|
||||
"module": "subscriptions"
|
||||
},
|
||||
{
|
||||
"code": "subscriptions.create",
|
||||
"name": "Create Subscriptions",
|
||||
"description": "Create manual subscriptions for users",
|
||||
"module": "subscriptions"
|
||||
},
|
||||
{
|
||||
"code": "subscriptions.edit",
|
||||
"name": "Edit Subscriptions",
|
||||
"description": "Edit subscription details",
|
||||
"module": "subscriptions"
|
||||
},
|
||||
{
|
||||
"code": "subscriptions.cancel",
|
||||
"name": "Cancel Subscriptions",
|
||||
"description": "Cancel user subscriptions",
|
||||
"module": "subscriptions"
|
||||
},
|
||||
{
|
||||
"code": "subscriptions.activate",
|
||||
"name": "Activate Subscriptions",
|
||||
"description": "Manually activate subscriptions",
|
||||
"module": "subscriptions"
|
||||
},
|
||||
{
|
||||
"code": "subscriptions.plans",
|
||||
"name": "Manage Subscription Plans",
|
||||
"description": "Create and edit subscription plans",
|
||||
"module": "subscriptions"
|
||||
},
|
||||
|
||||
# ========== FINANCIALS MODULE ==========
|
||||
{
|
||||
"code": "financials.view",
|
||||
"name": "View Financial Reports",
|
||||
"description": "View financial reports and dashboards",
|
||||
"module": "financials"
|
||||
},
|
||||
{
|
||||
"code": "financials.create",
|
||||
"name": "Create Financial Reports",
|
||||
"description": "Upload and create financial reports",
|
||||
"module": "financials"
|
||||
},
|
||||
{
|
||||
"code": "financials.edit",
|
||||
"name": "Edit Financial Reports",
|
||||
"description": "Edit existing financial reports",
|
||||
"module": "financials"
|
||||
},
|
||||
{
|
||||
"code": "financials.delete",
|
||||
"name": "Delete Financial Reports",
|
||||
"description": "Delete financial reports",
|
||||
"module": "financials"
|
||||
},
|
||||
{
|
||||
"code": "financials.export",
|
||||
"name": "Export Financial Data",
|
||||
"description": "Export financial data to CSV/PDF",
|
||||
"module": "financials"
|
||||
},
|
||||
{
|
||||
"code": "financials.payments",
|
||||
"name": "View Payment Details",
|
||||
"description": "View detailed payment information",
|
||||
"module": "financials"
|
||||
},
|
||||
|
||||
# ========== NEWSLETTERS MODULE ==========
|
||||
{
|
||||
"code": "newsletters.view",
|
||||
"name": "View Newsletters",
|
||||
"description": "View newsletter archives",
|
||||
"module": "newsletters"
|
||||
},
|
||||
{
|
||||
"code": "newsletters.create",
|
||||
"name": "Create Newsletters",
|
||||
"description": "Upload and create newsletters",
|
||||
"module": "newsletters"
|
||||
},
|
||||
{
|
||||
"code": "newsletters.edit",
|
||||
"name": "Edit Newsletters",
|
||||
"description": "Edit existing newsletters",
|
||||
"module": "newsletters"
|
||||
},
|
||||
{
|
||||
"code": "newsletters.delete",
|
||||
"name": "Delete Newsletters",
|
||||
"description": "Delete newsletter archives",
|
||||
"module": "newsletters"
|
||||
},
|
||||
{
|
||||
"code": "newsletters.send",
|
||||
"name": "Send Newsletters",
|
||||
"description": "Send newsletter emails to subscribers",
|
||||
"module": "newsletters"
|
||||
},
|
||||
{
|
||||
"code": "newsletters.subscribers",
|
||||
"name": "Manage Newsletter Subscribers",
|
||||
"description": "View and manage newsletter subscribers",
|
||||
"module": "newsletters"
|
||||
},
|
||||
|
||||
# ========== BYLAWS MODULE ==========
|
||||
{
|
||||
"code": "bylaws.view",
|
||||
"name": "View Bylaws",
|
||||
"description": "View organization bylaws documents",
|
||||
"module": "bylaws"
|
||||
},
|
||||
{
|
||||
"code": "bylaws.create",
|
||||
"name": "Create Bylaws",
|
||||
"description": "Upload new bylaws documents",
|
||||
"module": "bylaws"
|
||||
},
|
||||
{
|
||||
"code": "bylaws.edit",
|
||||
"name": "Edit Bylaws",
|
||||
"description": "Edit existing bylaws documents",
|
||||
"module": "bylaws"
|
||||
},
|
||||
{
|
||||
"code": "bylaws.delete",
|
||||
"name": "Delete Bylaws",
|
||||
"description": "Delete bylaws documents",
|
||||
"module": "bylaws"
|
||||
},
|
||||
{
|
||||
"code": "bylaws.publish",
|
||||
"name": "Publish Bylaws",
|
||||
"description": "Mark bylaws as current/published version",
|
||||
"module": "bylaws"
|
||||
},
|
||||
|
||||
# ========== GALLERY MODULE ==========
|
||||
{
|
||||
"code": "gallery.view",
|
||||
"name": "View Event Gallery",
|
||||
"description": "View event gallery photos",
|
||||
"module": "gallery"
|
||||
},
|
||||
{
|
||||
"code": "gallery.upload",
|
||||
"name": "Upload Photos",
|
||||
"description": "Upload photos to event galleries",
|
||||
"module": "gallery"
|
||||
},
|
||||
{
|
||||
"code": "gallery.edit",
|
||||
"name": "Edit Photos",
|
||||
"description": "Edit photo captions and details",
|
||||
"module": "gallery"
|
||||
},
|
||||
{
|
||||
"code": "gallery.delete",
|
||||
"name": "Delete Photos",
|
||||
"description": "Delete photos from galleries",
|
||||
"module": "gallery"
|
||||
},
|
||||
{
|
||||
"code": "gallery.moderate",
|
||||
"name": "Moderate Gallery Content",
|
||||
"description": "Approve/reject uploaded photos",
|
||||
"module": "gallery"
|
||||
},
|
||||
|
||||
# ========== PAYMENT METHODS MODULE ==========
|
||||
{
|
||||
"code": "payment_methods.view",
|
||||
"name": "View Payment Methods",
|
||||
"description": "View user payment methods (masked)",
|
||||
"module": "payment_methods"
|
||||
},
|
||||
{
|
||||
"code": "payment_methods.view_sensitive",
|
||||
"name": "View Sensitive Payment Details",
|
||||
"description": "View full payment method details including Stripe IDs (requires password)",
|
||||
"module": "payment_methods"
|
||||
},
|
||||
{
|
||||
"code": "payment_methods.create",
|
||||
"name": "Create Payment Methods",
|
||||
"description": "Add payment methods on behalf of users",
|
||||
"module": "payment_methods"
|
||||
},
|
||||
{
|
||||
"code": "payment_methods.delete",
|
||||
"name": "Delete Payment Methods",
|
||||
"description": "Delete user payment methods",
|
||||
"module": "payment_methods"
|
||||
},
|
||||
{
|
||||
"code": "payment_methods.set_default",
|
||||
"name": "Set Default Payment Method",
|
||||
"description": "Set a user's default payment method",
|
||||
"module": "payment_methods"
|
||||
},
|
||||
|
||||
# ========== SETTINGS MODULE ==========
|
||||
{
|
||||
"code": "settings.view",
|
||||
"name": "View Settings",
|
||||
"description": "View application settings",
|
||||
"module": "settings"
|
||||
},
|
||||
{
|
||||
"code": "settings.edit",
|
||||
"name": "Edit Settings",
|
||||
"description": "Edit application settings",
|
||||
"module": "settings"
|
||||
},
|
||||
{
|
||||
"code": "settings.email_templates",
|
||||
"name": "Manage Email Templates",
|
||||
"description": "Edit email templates and notifications",
|
||||
"module": "settings"
|
||||
},
|
||||
{
|
||||
"code": "settings.storage",
|
||||
"name": "Manage Storage",
|
||||
"description": "View and manage storage usage",
|
||||
"module": "settings"
|
||||
},
|
||||
{
|
||||
"code": "settings.backup",
|
||||
"name": "Backup & Restore",
|
||||
"description": "Create and restore database backups",
|
||||
"module": "settings"
|
||||
},
|
||||
{
|
||||
"code": "settings.logs",
|
||||
"name": "View System Logs",
|
||||
"description": "View application and audit logs",
|
||||
"module": "settings"
|
||||
},
|
||||
|
||||
# ========== PERMISSIONS MODULE (SUPERADMIN ONLY) ==========
|
||||
{
|
||||
"code": "permissions.view",
|
||||
"name": "View Permissions",
|
||||
"description": "View permission definitions and assignments",
|
||||
"module": "permissions"
|
||||
},
|
||||
{
|
||||
"code": "permissions.assign",
|
||||
"name": "Assign Permissions",
|
||||
"description": "Assign permissions to roles (SUPERADMIN ONLY)",
|
||||
"module": "permissions"
|
||||
},
|
||||
{
|
||||
"code": "permissions.manage_roles",
|
||||
"name": "Manage Roles",
|
||||
"description": "Create and manage user roles",
|
||||
"module": "permissions"
|
||||
},
|
||||
{
|
||||
"code": "permissions.audit",
|
||||
"name": "View Permission Audit Log",
|
||||
"description": "View permission change audit logs",
|
||||
"module": "permissions"
|
||||
},
|
||||
]
|
||||
|
||||
# Default permission assignments for each role.
# Keys are UserRole members; values are lists of Permission.code strings
# from PERMISSIONS above.  seed_permissions() turns each entry into a
# RolePermission row.
DEFAULT_ROLE_PERMISSIONS = {
    UserRole.guest: [],  # Guests have no admin permissions

    UserRole.member: [
        # Members can view public content
        "events.view",
        "events.rsvps",
        "events.calendar_export",
        "newsletters.view",
        "bylaws.view",
        "gallery.view",
    ],

    UserRole.admin: [
        # Admins have most permissions except RBAC management.
        # NOTE(review): users.delete, payment_methods.view_sensitive and
        # settings.backup are absent from this list — confirm that is
        # intentional (view_sensitive is reserved for finance below).
        "users.view",
        "users.create",
        "users.edit",
        "users.status",
        "users.approve",
        "users.export",
        "users.import",
        "users.reset_password",
        "users.resend_verification",
        "events.view",
        "events.create",
        "events.edit",
        "events.delete",
        "events.publish",
        "events.attendance",
        "events.rsvps",
        "events.calendar_export",
        "subscriptions.view",
        "subscriptions.create",
        "subscriptions.edit",
        "subscriptions.cancel",
        "subscriptions.activate",
        "subscriptions.plans",
        "financials.view",
        "financials.create",
        "financials.edit",
        "financials.delete",
        "financials.export",
        "financials.payments",
        "newsletters.view",
        "newsletters.create",
        "newsletters.edit",
        "newsletters.delete",
        "newsletters.send",
        "newsletters.subscribers",
        "bylaws.view",
        "bylaws.create",
        "bylaws.edit",
        "bylaws.delete",
        "bylaws.publish",
        "gallery.view",
        "gallery.upload",
        "gallery.edit",
        "gallery.delete",
        "gallery.moderate",
        "payment_methods.view",
        "payment_methods.create",
        "payment_methods.delete",
        "payment_methods.set_default",
        "settings.view",
        "settings.edit",
        "settings.email_templates",
        "settings.storage",
        "settings.logs",
    ],

    UserRole.finance: [
        # Finance gets full financial/subscription access plus sensitive
        # payment details, with read-only access to most other modules.
        "users.view",
        "users.export",
        "events.view",
        "events.rsvps",
        "events.calendar_export",
        "subscriptions.view",
        "subscriptions.create",
        "subscriptions.edit",
        "subscriptions.cancel",
        "subscriptions.activate",
        "subscriptions.plans",
        "financials.view",
        "financials.create",
        "financials.edit",
        "financials.delete",
        "financials.export",
        "financials.payments",
        "newsletters.view",
        "bylaws.view",
        "gallery.view",
        "payment_methods.view",
        "payment_methods.view_sensitive",  # Finance can view sensitive payment details
        "payment_methods.create",
        "payment_methods.delete",
        "payment_methods.set_default",
        "settings.view",
    ],

    # Superadmin gets all permissions automatically in code,
    # so we don't need to explicitly assign them
    UserRole.superadmin: []
}
|
||||
|
||||
|
||||
def seed_permissions():
    """Seed permissions and default role assignments.

    Replaces the contents of the ``permissions`` and ``role_permissions``
    tables with the definitions in PERMISSIONS / DEFAULT_ROLE_PERMISSIONS.

    Runs as a single transaction: deletes and inserts are only flushed until
    the very end, then committed once.  (The previous version committed the
    deletes first, so a failure while inserting left the database with NO
    permissions at all; now any failure rolls everything back.)

    Raises:
        Exception: re-raises any database error after rolling back.
    """
    db = SessionLocal()

    try:
        print("🌱 Starting permission seeding...")

        # Step 1: Clear existing permissions and role_permissions.
        # flush() sends the DELETEs to the DB without committing, keeping
        # the whole seeding run atomic.
        print("\n📦 Clearing existing permissions and role assignments...")
        db.query(RolePermission).delete()
        db.query(Permission).delete()
        db.flush()
        print("✓ Cleared existing data")

        # Step 2: Create permissions
        print(f"\n📝 Creating {len(PERMISSIONS)} permissions...")
        permission_map = {}  # Map code to permission object

        for perm_data in PERMISSIONS:
            permission = Permission(
                code=perm_data["code"],
                name=perm_data["name"],
                description=perm_data["description"],
                module=perm_data["module"]
            )
            db.add(permission)
            permission_map[perm_data["code"]] = permission

        # flush() assigns primary keys so the role assignments below can
        # reference permission_map[code].id before the final commit.
        db.flush()
        print(f"✓ Created {len(PERMISSIONS)} permissions")

        # Step 3: Assign default permissions to roles
        print("\n🔐 Assigning default permissions to roles...")

        for role, permission_codes in DEFAULT_ROLE_PERMISSIONS.items():
            if not permission_codes:
                print(f"  • {role.value}: No default permissions (handled in code)")
                continue

            for code in permission_codes:
                if code not in permission_map:
                    # A typo in DEFAULT_ROLE_PERMISSIONS should not abort the seed
                    print(f"  ⚠️  Warning: Permission '{code}' not found for role {role.value}")
                    continue

                db.add(RolePermission(
                    role=role,
                    permission_id=permission_map[code].id
                ))

            print(f"  ✓ {role.value}: Assigned {len(permission_codes)} permissions")

        # Single commit: deletes, permissions and role assignments together.
        db.commit()

        # Step 4: Summary
        print("\n" + "="*60)
        print("📊 Seeding Summary:")
        print("="*60)

        # Count permissions by module
        modules = {}
        for perm in PERMISSIONS:
            module = perm["module"]
            modules[module] = modules.get(module, 0) + 1

        print("\nPermissions by module:")
        for module, count in sorted(modules.items()):
            print(f"  • {module.capitalize()}: {count} permissions")

        print(f"\nTotal permissions: {len(PERMISSIONS)}")
        print("\n✅ Permission seeding completed successfully!")

    except Exception as e:
        db.rollback()
        print(f"\n❌ Error seeding permissions: {str(e)}")
        raise
    finally:
        db.close()


if __name__ == "__main__":
    seed_permissions()
|
||||
@@ -35,6 +35,21 @@ class R2Storage:
|
||||
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': ['.xlsx']
|
||||
}
|
||||
|
||||
# Branding assets (logo and favicon)
|
||||
ALLOWED_BRANDING_TYPES = {
|
||||
'image/jpeg': ['.jpg', '.jpeg'],
|
||||
'image/png': ['.png'],
|
||||
'image/webp': ['.webp'],
|
||||
'image/svg+xml': ['.svg']
|
||||
}
|
||||
|
||||
ALLOWED_FAVICON_TYPES = {
|
||||
'image/x-icon': ['.ico'],
|
||||
'image/vnd.microsoft.icon': ['.ico'],
|
||||
'image/png': ['.png'],
|
||||
'image/svg+xml': ['.svg']
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize R2 client with credentials from environment"""
|
||||
self.account_id = os.getenv('R2_ACCOUNT_ID')
|
||||
|
||||
487
reminder_emails.py
Normal file
487
reminder_emails.py
Normal file
@@ -0,0 +1,487 @@
|
||||
"""
|
||||
Reminder Email System
|
||||
|
||||
This module handles all reminder emails sent before status transitions.
|
||||
Ensures users receive multiple reminders before any auto-abandonment occurs.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from typing import Dict, List, Optional
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Reminder schedules (in days since status started).
# Each list holds the day-offsets at which a reminder fires; the helpers in
# this module compare days-elapsed against these values to pick the next
# unsent reminder.
# NOTE(review): 'renewal' and 'post_expiration' are written as days-until /
# days-after milestones rather than ascending days-elapsed like the first
# three — confirm how callers translate them before reuse.
REMINDER_SCHEDULES = {
    'email_verification': [3, 7, 14, 30],        # Before potential abandonment
    'event_attendance': [30, 60, 80, 85],        # Before 90-day deadline
    'payment_pending': [7, 14, 21, 30, 45, 60],  # Before potential abandonment
    'renewal': [60, 30, 14, 7],                  # Before expiration
    'post_expiration': [7, 30, 90]               # After expiration
}
|
||||
|
||||
|
||||
def get_days_since_status_change(user, current_status: str) -> int:
    """
    Calculate number of days since user entered current status.

    Uses ``user.updated_at`` as a proxy for the moment of the last status
    change (the user model has no per-status timestamp).

    Args:
        user: User object (only ``updated_at`` is read)
        current_status: Current status to check (currently unused; kept for
            interface compatibility)

    Returns:
        Whole days since the status change; 0 when the timestamp is missing
        or lies in the future.
    """
    changed_at = user.updated_at
    if not changed_at:
        return 0

    # SQLAlchemy DateTime columns usually come back naive; treat a naive
    # timestamp as UTC instead of raising TypeError on aware-minus-naive
    # subtraction (the previous version crashed on naive values).
    if changed_at.tzinfo is None:
        changed_at = changed_at.replace(tzinfo=timezone.utc)

    delta = datetime.now(timezone.utc) - changed_at
    # Clock skew / future timestamps must not produce negative day counts.
    return max(delta.days, 0)
|
||||
|
||||
|
||||
def should_send_reminder(days_elapsed: int, schedule: List[int], last_reminder_day: Optional[int] = None) -> Optional[int]:
    """
    Determine if a reminder should be sent based on elapsed days.

    Walks the schedule in order and returns the first milestone that has
    both been reached (``days_elapsed >= milestone``) and not yet been
    covered by a previously sent reminder.

    Args:
        days_elapsed: Days since status change
        schedule: List of reminder days
        last_reminder_day: Day of last reminder sent (optional)

    Returns:
        Reminder day if should send, None otherwise
    """
    for milestone in schedule:
        # Milestone not reached yet — keep scanning.
        if days_elapsed < milestone:
            continue
        # This milestone was already covered by an earlier reminder.
        if last_reminder_day is not None and last_reminder_day >= milestone:
            continue
        return milestone

    return None
|
||||
|
||||
|
||||
def send_email_verification_reminder(user, days_elapsed: int, email_service, db_session=None) -> bool:
    """
    Send email verification reminder.

    Args:
        user: User object (reads ``first_name``, ``email``, ``id``; when
            db_session is given, the reminder-tracking attributes
            ``email_verification_reminders_sent`` and
            ``last_email_verification_reminder_at`` are also updated)
        days_elapsed: Days since registration
        email_service: Email service instance (must provide ``send_email()``
            and ``get_verification_link()``)
        db_session: Database session (optional, for tracking)

    Returns:
        True if email sent successfully, False on any send failure
    """
    # 1-based position of days_elapsed within the schedule; 0 when called on
    # a day that is not an exact schedule milestone.
    # NOTE(review): the "reminder_number == 4" branch below assumes the
    # 'email_verification' schedule keeps exactly 4 entries — revisit if the
    # schedule ever changes length.
    reminder_number = REMINDER_SCHEDULES['email_verification'].index(days_elapsed) + 1 if days_elapsed in REMINDER_SCHEDULES['email_verification'] else 0

    subject = f"Reminder: Verify your email to complete registration"

    if reminder_number == 4:
        # Final reminder — includes the abandonment warning
        message = f"""
        <h2>Final Reminder: Complete Your LOAF Registration</h2>
        <p>Hi {user.first_name},</p>
        <p>This is your final reminder to verify your email address and complete your LOAF membership registration.</p>
        <p>It's been {days_elapsed} days since you registered. If you don't verify your email soon,
        your application will be marked as abandoned and you'll need to contact us to restart the process.</p>
        <p>Click the link below to verify your email:</p>
        <p><a href="{email_service.get_verification_link(user)}">Verify Email Address</a></p>
        <p>Need help? Reply to this email or contact us at info@loaftx.org</p>
        <p>Best regards,<br>LOAF Team</p>
        """
    else:
        # Earlier reminders — friendly nudge without the abandonment warning
        message = f"""
        <h2>Reminder: Verify Your Email Address</h2>
        <p>Hi {user.first_name},</p>
        <p>You registered for LOAF membership {days_elapsed} days ago but haven't verified your email yet.</p>
        <p>Click the link below to verify your email and continue your membership journey:</p>
        <p><a href="{email_service.get_verification_link(user)}">Verify Email Address</a></p>
        <p>Once verified, you'll receive our monthly newsletter with event announcements!</p>
        <p>Best regards,<br>LOAF Team</p>
        """

    try:
        email_service.send_email(user.email, subject, message)
        logger.info(f"Sent email verification reminder #{reminder_number} to user {user.id} (day {days_elapsed})")

        # Track reminder in database for admin visibility
        if db_session:
            user.email_verification_reminders_sent = (user.email_verification_reminders_sent or 0) + 1
            user.last_email_verification_reminder_at = datetime.now(timezone.utc)
            db_session.commit()
            logger.info(f"Updated reminder tracking: user {user.id} has received {user.email_verification_reminders_sent} verification reminders")

        return True
    except Exception as e:
        # Best-effort: a failed send is logged and reported via the return
        # value so the calling batch job can continue with other users.
        logger.error(f"Failed to send email verification reminder to user {user.id}: {str(e)}")
        return False
|
||||
|
||||
|
||||
def send_event_attendance_reminder(user, days_elapsed: int, email_service, db_session=None):
    """
    Send an event attendance reminder email.

    Per LOAF policy, new applicants must attend an event within 90 days of
    email verification; the message escalates in urgency as that deadline
    approaches (60 / 30 / 10 / 5 days remaining).

    Args:
        user: User object (uses .first_name, .email, .id and the
            event_attendance reminder-tracking columns)
        days_elapsed: Days since email verification
        email_service: Email service instance (uses .send_email)
        db_session: Database session (optional, for reminder tracking)

    Returns:
        True if the email was sent and tracking committed successfully,
        False otherwise.
    """
    days_remaining = 90 - days_elapsed

    subject = f"Reminder: Attend a LOAF event ({days_remaining} days remaining)"

    # Pick the message tier by how close the user is to the 90-day deadline.
    if days_elapsed >= 85:
        # Final reminder (5 days left)
        message = f"""
        <h2>Final Reminder: Only {days_remaining} Days to Attend an Event!</h2>
        <p>Hi {user.first_name},</p>
        <p><strong>Important:</strong> You have only {days_remaining} days left to attend a LOAF event
        and complete your membership application.</p>
        <p>If you don't attend an event within the 90-day period, your application will be marked as
        abandoned per LOAF policy, and you'll need to contact us to restart.</p>
        <p>Check out our upcoming events in the monthly newsletter or visit our events page!</p>
        <p>Need help finding an event? Reply to this email or contact us at info@loaftx.org</p>
        <p>We'd love to meet you soon!</p>
        <p>Best regards,<br>LOAF Team</p>
        """
    elif days_elapsed >= 80:
        # 10 days left
        message = f"""
        <h2>Reminder: {days_remaining} Days to Attend a LOAF Event</h2>
        <p>Hi {user.first_name},</p>
        <p>Just a friendly reminder that you have {days_remaining} days left to attend a LOAF event
        and complete your membership application.</p>
        <p>Per LOAF policy, new applicants must attend an event within 90 days of email verification
        to continue the membership process.</p>
        <p>Check your newsletter for upcoming events, and we look forward to meeting you soon!</p>
        <p>Best regards,<br>LOAF Team</p>
        """
    elif days_elapsed >= 60:
        # 30 days left
        message = f"""
        <h2>Reminder: {days_remaining} Days to Attend a LOAF Event</h2>
        <p>Hi {user.first_name},</p>
        <p>You have {days_remaining} days remaining to attend a LOAF event as part of your membership application.</p>
        <p>Attending an event is a great way to meet other members and learn more about LOAF.
        Check out the upcoming events in your monthly newsletter!</p>
        <p>We look forward to seeing you soon!</p>
        <p>Best regards,<br>LOAF Team</p>
        """
    else:
        # 60 days left
        message = f"""
        <h2>Reminder: Attend a LOAF Event (60 Days Remaining)</h2>
        <p>Hi {user.first_name},</p>
        <p>Welcome to LOAF! As part of your membership application, you have 90 days to attend one of our events.</p>
        <p>You have {days_remaining} days remaining to attend an event and continue your membership journey.</p>
        <p>Check out the events listed in your monthly newsletter. We can't wait to meet you!</p>
        <p>Best regards,<br>LOAF Team</p>
        """

    try:
        email_service.send_email(user.email, subject, message)
        logger.info(f"Sent event attendance reminder to user {user.id} (day {days_elapsed}, {days_remaining} days left)")

        # Track reminder in database for admin visibility
        if db_session:
            user.event_attendance_reminders_sent = (user.event_attendance_reminders_sent or 0) + 1
            user.last_event_attendance_reminder_at = datetime.now(timezone.utc)
            db_session.commit()
            logger.info(f"Updated reminder tracking: user {user.id} has received {user.event_attendance_reminders_sent} event attendance reminders")

        return True
    except Exception as e:
        # BUGFIX: a failed commit previously left the session dirty, which
        # would break subsequent queries for other users in the same batch
        # run. Roll back so the session remains usable.
        if db_session:
            db_session.rollback()
        logger.error(f"Failed to send event attendance reminder to user {user.id}: {str(e)}")
        return False
|
||||
|
||||
|
||||
def send_payment_reminder(user, days_elapsed: int, email_service, db_session=None):
    """
    Send a payment reminder email to a validated applicant.

    The message tier escalates at 45 and 60 days since admin validation.

    Args:
        user: User object (uses .first_name, .email, .id and the
            payment reminder-tracking columns)
        days_elapsed: Days since admin validation
        email_service: Email service instance (uses .send_email and
            .get_payment_link)
        db_session: Database session (optional, for reminder tracking)

    Returns:
        True if the email was sent and tracking committed successfully,
        False otherwise.
    """
    # How many scheduled reminders have come due so far (used only for logging).
    reminder_count = sum(1 for day in REMINDER_SCHEDULES['payment_pending'] if day <= days_elapsed)

    # No placeholders here, so a plain string (was a needless f-string).
    subject = "Reminder: Complete your LOAF membership payment"

    if days_elapsed >= 60:
        # Final reminder
        message = f"""
        <h2>Final Payment Reminder</h2>
        <p>Hi {user.first_name},</p>
        <p>Congratulations again on being validated for LOAF membership!</p>
        <p>This is a final reminder to complete your membership payment. It's been {days_elapsed} days
        since your application was validated.</p>
        <p>Your payment link is still active. Click below to complete your payment and activate your membership:</p>
        <p><a href="{email_service.get_payment_link(user)}">Complete Payment</a></p>
        <p>Once payment is complete, you'll gain full access to all member benefits!</p>
        <p>Questions? Contact us at info@loaftx.org</p>
        <p>Best regards,<br>LOAF Team</p>
        """
    elif days_elapsed >= 45:
        message = f"""
        <h2>Payment Reminder - Complete Your Membership</h2>
        <p>Hi {user.first_name},</p>
        <p>Your LOAF membership application was validated and is ready for payment!</p>
        <p>Complete your payment to activate your membership and gain access to all member benefits:</p>
        <p><a href="{email_service.get_payment_link(user)}">Complete Payment</a></p>
        <p>We're excited to welcome you as a full member!</p>
        <p>Best regards,<br>LOAF Team</p>
        """
    else:
        message = f"""
        <h2>Payment Reminder</h2>
        <p>Hi {user.first_name},</p>
        <p>This is a friendly reminder to complete your LOAF membership payment.</p>
        <p>Your application was validated {days_elapsed} days ago. Click below to complete payment:</p>
        <p><a href="{email_service.get_payment_link(user)}">Complete Payment</a></p>
        <p>Questions about payment options? Contact us at info@loaftx.org</p>
        <p>Best regards,<br>LOAF Team</p>
        """

    try:
        email_service.send_email(user.email, subject, message)
        logger.info(f"Sent payment reminder #{reminder_count} to user {user.id} (day {days_elapsed})")

        # Track reminder in database for admin visibility
        if db_session:
            user.payment_reminders_sent = (user.payment_reminders_sent or 0) + 1
            user.last_payment_reminder_at = datetime.now(timezone.utc)
            db_session.commit()
            logger.info(f"Updated reminder tracking: user {user.id} has received {user.payment_reminders_sent} payment reminders")

        return True
    except Exception as e:
        # BUGFIX: roll back a failed commit so the session is not left dirty
        # for the remaining users in the batch run.
        if db_session:
            db_session.rollback()
        logger.error(f"Failed to send payment reminder to user {user.id}: {str(e)}")
        return False
|
||||
|
||||
|
||||
def send_renewal_reminder(user, subscription, days_until_expiration: int, email_service, db_session=None):
    """
    Send a membership renewal reminder before the subscription expires.

    A more urgent message is used in the final week before expiration.

    Args:
        user: User object (uses .first_name, .email, .id and the
            renewal reminder-tracking columns)
        subscription: Subscription object (kept in the signature for callers;
            not read by the message body itself)
        days_until_expiration: Days until subscription expires
        email_service: Email service instance (uses .send_email and
            .get_renewal_link)
        db_session: Database session (optional, for reminder tracking)

    Returns:
        True if the email was sent and tracking committed successfully,
        False otherwise.
    """
    subject = f"Reminder: Your LOAF membership expires in {days_until_expiration} days"

    if days_until_expiration <= 7:
        # Final reminder
        message = f"""
        <h2>Final Reminder: Renew Your LOAF Membership</h2>
        <p>Hi {user.first_name},</p>
        <p><strong>Your LOAF membership expires in {days_until_expiration} days!</strong></p>
        <p>Don't lose access to member benefits. Renew now to continue enjoying:</p>
        <ul>
        <li>Exclusive member events</li>
        <li>Member directory access</li>
        <li>Monthly newsletter</li>
        <li>Community connection</li>
        </ul>
        <p><a href="{email_service.get_renewal_link(user)}">Renew Your Membership Now</a></p>
        <p>Questions? Contact us at info@loaftx.org</p>
        <p>Best regards,<br>LOAF Team</p>
        """
    else:
        message = f"""
        <h2>Reminder: Renew Your LOAF Membership</h2>
        <p>Hi {user.first_name},</p>
        <p>Your LOAF membership will expire in {days_until_expiration} days.</p>
        <p>Renew now to continue enjoying all member benefits without interruption:</p>
        <p><a href="{email_service.get_renewal_link(user)}">Renew Your Membership</a></p>
        <p>Thank you for being part of the LOAF community!</p>
        <p>Best regards,<br>LOAF Team</p>
        """

    try:
        email_service.send_email(user.email, subject, message)
        logger.info(f"Sent renewal reminder to user {user.id} ({days_until_expiration} days until expiration)")

        # Track reminder in database for admin visibility
        if db_session:
            user.renewal_reminders_sent = (user.renewal_reminders_sent or 0) + 1
            user.last_renewal_reminder_at = datetime.now(timezone.utc)
            db_session.commit()
            logger.info(f"Updated reminder tracking: user {user.id} has received {user.renewal_reminders_sent} renewal reminders")

        return True
    except Exception as e:
        # BUGFIX: roll back a failed commit so the session is not left dirty
        # for the remaining users in the batch run.
        if db_session:
            db_session.rollback()
        logger.error(f"Failed to send renewal reminder to user {user.id}: {str(e)}")
        return False
|
||||
|
||||
|
||||
def send_post_expiration_reminder(user, days_since_expiration: int, email_service):
    """
    Send a win-back reminder after a membership has already expired.

    Three message tiers: shortly after expiration (under 30 days), a
    30-day follow-up, and a final note at 90+ days. No database tracking
    is performed for post-expiration reminders.

    Args:
        user: User object
        days_since_expiration: Days since expiration
        email_service: Email service instance

    Returns:
        True if email sent successfully
    """
    subject = "We'd love to have you back at LOAF!"

    # Choose the message tier, checking the most recent window first.
    if days_since_expiration < 30:
        # 7 days after expiration
        message = f"""
        <h2>Your LOAF Membership Has Expired</h2>
        <p>Hi {user.first_name},</p>
        <p>Your LOAF membership expired recently. We hope it was just an oversight!</p>
        <p>Renew now to restore your access to all member benefits:</p>
        <p><a href="{email_service.get_renewal_link(user)}">Renew Your Membership</a></p>
        <p>We look forward to seeing you at upcoming events!</p>
        <p>Best regards,<br>LOAF Team</p>
        """
    elif days_since_expiration < 90:
        message = f"""
        <h2>Renew Your LOAF Membership</h2>
        <p>Hi {user.first_name},</p>
        <p>Your LOAF membership expired {days_since_expiration} days ago.</p>
        <p>We'd love to have you back! Renew today to regain access to:</p>
        <ul>
        <li>Member events and gatherings</li>
        <li>Member directory</li>
        <li>Community connection</li>
        </ul>
        <p><a href="{email_service.get_renewal_link(user)}">Renew Your Membership</a></p>
        <p>Best regards,<br>LOAF Team</p>
        """
    else:
        # Final reminder
        message = f"""
        <h2>We Miss You at LOAF!</h2>
        <p>Hi {user.first_name},</p>
        <p>Your LOAF membership expired {days_since_expiration} days ago, and we'd love to have you back!</p>
        <p>Rejoin the community and reconnect with friends:</p>
        <p><a href="{email_service.get_renewal_link(user)}">Renew Your Membership</a></p>
        <p>Questions? We're here to help: info@loaftx.org</p>
        <p>Best regards,<br>LOAF Team</p>
        """

    try:
        email_service.send_email(user.email, subject, message)
    except Exception as exc:
        logger.error(f"Failed to send post-expiration reminder to user {user.id}: {str(exc)}")
        return False
    logger.info(f"Sent post-expiration reminder to user {user.id} ({days_since_expiration} days since expiration)")
    return True
|
||||
|
||||
|
||||
# Background job for sending reminder emails
|
||||
def process_reminder_emails(db_session, email_service):
    """
    Process and send all due reminder emails.

    This should be run as an hourly background job. For each reminder
    category it finds users whose elapsed days (or days until expiration)
    match that category's schedule and sends the appropriate reminder.

    Args:
        db_session: Database session
        email_service: Email service instance

    Returns:
        Dictionary with counts of emails sent per category
    """
    from models import User, UserStatus, Subscription
    from datetime import date

    results = {
        'email_verification': 0,
        'event_attendance': 0,
        'payment': 0,
        'renewal': 0,
        'post_expiration': 0
    }

    # PERF: categories 1-3 previously re-ran the identical full query once
    # per schedule entry (O(schedule_len * users) queries). Since each user's
    # days_elapsed can match at most one schedule day, we query once per
    # category and test membership in the schedule set instead.

    # 1. Email Verification Reminders
    schedule = set(REMINDER_SCHEDULES['email_verification'])
    users = db_session.query(User).filter(
        User.status == UserStatus.pending_email,
        User.email_verified == False
    ).all()
    for user in users:
        days_elapsed = get_days_since_status_change(user, 'pending_email')
        if days_elapsed in schedule:
            if send_email_verification_reminder(user, days_elapsed, email_service, db_session):
                results['email_verification'] += 1

    # 2. Event Attendance Reminders
    schedule = set(REMINDER_SCHEDULES['event_attendance'])
    users = db_session.query(User).filter(
        User.status == UserStatus.pending_validation
    ).all()
    for user in users:
        days_elapsed = get_days_since_status_change(user, 'pending_validation')
        if days_elapsed in schedule:
            if send_event_attendance_reminder(user, days_elapsed, email_service, db_session):
                results['event_attendance'] += 1

    # 3. Payment Reminders
    schedule = set(REMINDER_SCHEDULES['payment_pending'])
    users = db_session.query(User).filter(
        User.status == UserStatus.payment_pending
    ).all()
    for user in users:
        days_elapsed = get_days_since_status_change(user, 'payment_pending')
        if days_elapsed in schedule:
            if send_payment_reminder(user, days_elapsed, email_service, db_session):
                results['payment'] += 1

    # 4. Renewal Reminders (before expiration)
    # These queries depend on the target date, so one query per schedule
    # entry is inherent here.
    for days_before in REMINDER_SCHEDULES['renewal']:
        # Find active subscriptions expiring in exactly `days_before` days
        target_date = date.today() + timedelta(days=days_before)

        subscriptions = db_session.query(User, Subscription).join(
            Subscription, User.id == Subscription.user_id
        ).filter(
            User.status == UserStatus.active,
            Subscription.end_date == target_date
        ).all()

        for user, subscription in subscriptions:
            if send_renewal_reminder(user, subscription, days_before, email_service, db_session):
                results['renewal'] += 1

    # 5. Post-Expiration Reminders
    for days_after in REMINDER_SCHEDULES['post_expiration']:
        target_date = date.today() - timedelta(days=days_after)

        subscriptions = db_session.query(User, Subscription).join(
            Subscription, User.id == Subscription.user_id
        ).filter(
            User.status == UserStatus.expired,
            Subscription.end_date == target_date
        ).all()

        for user, subscription in subscriptions:
            if send_post_expiration_reminder(user, days_after, email_service):
                results['post_expiration'] += 1

    logger.info(f"Reminder email batch complete: {results}")
    return results
|
||||
@@ -1,4 +1,5 @@
|
||||
aiosmtplib==5.0.0
|
||||
alembic==1.14.0
|
||||
annotated-types==0.7.0
|
||||
anyio==4.11.0
|
||||
bcrypt==4.1.3
|
||||
@@ -37,6 +38,7 @@ pandas==2.3.3
|
||||
passlib==1.7.4
|
||||
pathspec==0.12.1
|
||||
pillow==10.2.0
|
||||
phpserialize==1.3
|
||||
platformdirs==4.5.0
|
||||
pluggy==1.6.0
|
||||
psycopg2-binary==2.9.11
|
||||
|
||||
147
roles_seed.py
Normal file
147
roles_seed.py
Normal file
@@ -0,0 +1,147 @@
|
||||
"""
|
||||
Role Seeding Script
|
||||
|
||||
This script populates the database with system roles for the dynamic RBAC system.
|
||||
Creates 4 system roles: Superadmin, Finance, Member, and Guest.
|
||||
|
||||
Usage:
|
||||
python roles_seed.py
|
||||
|
||||
Environment Variables:
|
||||
DATABASE_URL - PostgreSQL connection string
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from database import Base
|
||||
from models import Role
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Load environment variables
|
||||
load_dotenv()
|
||||
|
||||
# Database connection
|
||||
DATABASE_URL = os.getenv("DATABASE_URL")
|
||||
if not DATABASE_URL:
|
||||
print("Error: DATABASE_URL environment variable not set")
|
||||
sys.exit(1)
|
||||
|
||||
engine = create_engine(DATABASE_URL)
|
||||
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||
|
||||
# ============================================================
|
||||
# System Role Definitions
|
||||
# ============================================================
|
||||
|
||||
SYSTEM_ROLES = [
|
||||
{
|
||||
"code": "superadmin",
|
||||
"name": "Superadmin",
|
||||
"description": "Full system access with all permissions. Can manage roles, permissions, and all platform features.",
|
||||
"is_system_role": True
|
||||
},
|
||||
{
|
||||
"code": "admin",
|
||||
"name": "Admin",
|
||||
"description": "Administrative access to most platform features. Can manage users, events, and content.",
|
||||
"is_system_role": True
|
||||
},
|
||||
{
|
||||
"code": "finance",
|
||||
"name": "Finance Manager",
|
||||
"description": "Access to financial features including subscriptions, payments, and financial reports.",
|
||||
"is_system_role": True
|
||||
},
|
||||
{
|
||||
"code": "member",
|
||||
"name": "Member",
|
||||
"description": "Standard member access. Can view events, manage profile, and participate in community features.",
|
||||
"is_system_role": True
|
||||
},
|
||||
{
|
||||
"code": "guest",
|
||||
"name": "Guest",
|
||||
"description": "Limited access for unverified or pending users. Can view basic information and complete registration.",
|
||||
"is_system_role": True
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
def seed_roles():
    """Seed system roles into the database.

    Interactive: if system roles already exist, asks the operator for
    confirmation before deleting and recreating them. Rolls back and
    re-raises on any error; always closes the session.
    """
    db = SessionLocal()

    try:
        print("🌱 Starting role seeding...")
        print("="*60)

        # Check if roles already exist — only system roles are considered;
        # admin-created (non-system) roles are never touched by this script.
        existing_roles = db.query(Role).filter(Role.is_system_role == True).all()
        if existing_roles:
            print(f"\n⚠️  Found {len(existing_roles)} existing system roles:")
            for role in existing_roles:
                print(f"   • {role.name} ({role.code})")

            # Destructive path — require an explicit 'yes' from the operator.
            response = input("\nDo you want to recreate system roles? This will delete existing system roles. (yes/no): ")
            if response.lower() != 'yes':
                print("\n❌ Seeding cancelled by user")
                return

            print("\n🗑️  Deleting existing system roles...")
            for role in existing_roles:
                db.delete(role)
            db.commit()
            print("✓ Deleted existing system roles")

        # Create system roles from the SYSTEM_ROLES definitions above.
        print(f"\n📝 Creating {len(SYSTEM_ROLES)} system roles...")
        created_roles = []

        for role_data in SYSTEM_ROLES:
            role = Role(
                code=role_data["code"],
                name=role_data["name"],
                description=role_data["description"],
                is_system_role=role_data["is_system_role"],
                created_by=None  # System roles have no creator
            )
            db.add(role)
            created_roles.append(role)
            print(f"   ✓ Created: {role.name} ({role.code})")

        # Single commit for all new roles — all-or-nothing.
        db.commit()
        print(f"\n✅ Created {len(created_roles)} system roles")

        # Display summary
        print("\n" + "="*60)
        print("📊 Seeding Summary:")
        print("="*60)
        print("\nSystem Roles Created:")
        for role in created_roles:
            print(f"\n   • {role.name} ({role.code})")
            print(f"     {role.description}")

        print("\n" + "="*60)
        print("✅ Role seeding completed successfully!")
        print("="*60)

        print("\n📝 Next Steps:")
        print("   1. Migrate existing users to use role_id (Phase 3)")
        print("   2. Migrate role_permissions to use role_id (Phase 3)")
        print("   3. Update authentication logic to use dynamic roles (Phase 3)")
        print("   4. Remove legacy enum columns (Phase 4)")

    except Exception as e:
        # Undo any partial work, show the traceback for the operator, and
        # re-raise so the process exits non-zero.
        db.rollback()
        print(f"\n❌ Error seeding roles: {str(e)}")
        import traceback
        traceback.print_exc()
        raise
    finally:
        db.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
seed_roles()
|
||||
380
seed_permissions_rbac.py
Executable file
380
seed_permissions_rbac.py
Executable file
@@ -0,0 +1,380 @@
|
||||
#!/usr/bin/env python3
"""
Permission Seeding Script for Dynamic RBAC System

This script populates the database with 69 granular permissions and assigns them
to the appropriate dynamic roles (not the old enum roles).

Usage:
    python3 seed_permissions_rbac.py

Environment Variables:
    DATABASE_URL - PostgreSQL connection string
"""

import os
import sys
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database import Base  # NOTE(review): Base looks unused here — possibly imported for model registration side effects; confirm
from models import Permission, RolePermission, Role
from dotenv import load_dotenv

# Load environment variables
load_dotenv()

# Database connection — fail fast if no connection string is configured.
DATABASE_URL = os.getenv("DATABASE_URL")
if not DATABASE_URL:
    print("Error: DATABASE_URL environment variable not set")
    sys.exit(1)

engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# ============================================================
# Permission Definitions (69 permissions across 13 modules)
# ============================================================
|
||||
# Master list of permission definitions. Each entry becomes a Permission row;
# `code` is the stable identifier used in role assignments ("module.action").
PERMISSIONS = [
    # ========== USERS MODULE (11) ==========
    {"code": "users.view", "name": "View Users", "description": "View user list and user profiles", "module": "users"},
    {"code": "users.create", "name": "Create Users", "description": "Create new users and send invitations", "module": "users"},
    {"code": "users.edit", "name": "Edit Users", "description": "Edit user profiles and information", "module": "users"},
    {"code": "users.delete", "name": "Delete Users", "description": "Delete user accounts", "module": "users"},
    {"code": "users.status", "name": "Change User Status", "description": "Change user status (active, inactive, etc.)", "module": "users"},
    {"code": "users.approve", "name": "Approve/Validate Users", "description": "Approve or validate user applications", "module": "users"},
    {"code": "users.export", "name": "Export Users", "description": "Export user data to CSV", "module": "users"},
    {"code": "users.import", "name": "Import Users", "description": "Import users from CSV", "module": "users"},
    {"code": "users.reset_password", "name": "Reset User Password", "description": "Reset user passwords via email", "module": "users"},
    {"code": "users.resend_verification", "name": "Resend Verification Email", "description": "Resend email verification links", "module": "users"},
    {"code": "users.invite", "name": "Invite Users", "description": "Send user invitations", "module": "users"},

    # ========== EVENTS MODULE (8) ==========
    {"code": "events.view", "name": "View Events", "description": "View event list and event details", "module": "events"},
    {"code": "events.create", "name": "Create Events", "description": "Create new events", "module": "events"},
    {"code": "events.edit", "name": "Edit Events", "description": "Edit existing events", "module": "events"},
    {"code": "events.delete", "name": "Delete Events", "description": "Delete events", "module": "events"},
    {"code": "events.publish", "name": "Publish Events", "description": "Publish or unpublish events", "module": "events"},
    {"code": "events.attendance", "name": "Mark Event Attendance", "description": "Mark user attendance for events", "module": "events"},
    {"code": "events.rsvps", "name": "View Event RSVPs", "description": "View and manage event RSVPs", "module": "events"},
    {"code": "events.calendar_export", "name": "Export Event Calendar", "description": "Export events to iCal format", "module": "events"},

    # ========== SUBSCRIPTIONS MODULE (7) ==========
    {"code": "subscriptions.view", "name": "View Subscriptions", "description": "View subscription list and details", "module": "subscriptions"},
    {"code": "subscriptions.create", "name": "Create Subscriptions", "description": "Create manual subscriptions for users", "module": "subscriptions"},
    {"code": "subscriptions.edit", "name": "Edit Subscriptions", "description": "Edit subscription details", "module": "subscriptions"},
    {"code": "subscriptions.cancel", "name": "Cancel Subscriptions", "description": "Cancel user subscriptions", "module": "subscriptions"},
    {"code": "subscriptions.activate", "name": "Activate Subscriptions", "description": "Manually activate subscriptions", "module": "subscriptions"},
    {"code": "subscriptions.plans", "name": "Manage Subscription Plans", "description": "Create and edit subscription plans", "module": "subscriptions"},
    {"code": "subscriptions.export", "name": "Export Subscriptions", "description": "Export subscription data to CSV", "module": "subscriptions"},

    # ========== DONATIONS MODULE (2) ==========
    {"code": "donations.view", "name": "View Donations", "description": "View donation list and details", "module": "donations"},
    {"code": "donations.export", "name": "Export Donations", "description": "Export donation data to CSV", "module": "donations"},

    # ========== FINANCIALS MODULE (6) ==========
    {"code": "financials.view", "name": "View Financial Reports", "description": "View financial reports and dashboards", "module": "financials"},
    {"code": "financials.create", "name": "Create Financial Reports", "description": "Upload and create financial reports", "module": "financials"},
    {"code": "financials.edit", "name": "Edit Financial Reports", "description": "Edit existing financial reports", "module": "financials"},
    {"code": "financials.delete", "name": "Delete Financial Reports", "description": "Delete financial reports", "module": "financials"},
    {"code": "financials.export", "name": "Export Financial Data", "description": "Export financial data to CSV/PDF", "module": "financials"},
    {"code": "financials.payments", "name": "View Payment Details", "description": "View detailed payment information", "module": "financials"},

    # ========== NEWSLETTERS MODULE (6) ==========
    {"code": "newsletters.view", "name": "View Newsletters", "description": "View newsletter archives", "module": "newsletters"},
    {"code": "newsletters.create", "name": "Create Newsletters", "description": "Upload and create newsletters", "module": "newsletters"},
    {"code": "newsletters.edit", "name": "Edit Newsletters", "description": "Edit existing newsletters", "module": "newsletters"},
    {"code": "newsletters.delete", "name": "Delete Newsletters", "description": "Delete newsletter archives", "module": "newsletters"},
    {"code": "newsletters.send", "name": "Send Newsletters", "description": "Send newsletter emails to subscribers", "module": "newsletters"},
    {"code": "newsletters.subscribers", "name": "Manage Newsletter Subscribers", "description": "View and manage newsletter subscribers", "module": "newsletters"},

    # ========== BYLAWS MODULE (5) ==========
    {"code": "bylaws.view", "name": "View Bylaws", "description": "View organization bylaws documents", "module": "bylaws"},
    {"code": "bylaws.create", "name": "Create Bylaws", "description": "Upload new bylaws documents", "module": "bylaws"},
    {"code": "bylaws.edit", "name": "Edit Bylaws", "description": "Edit existing bylaws documents", "module": "bylaws"},
    {"code": "bylaws.delete", "name": "Delete Bylaws", "description": "Delete bylaws documents", "module": "bylaws"},
    {"code": "bylaws.publish", "name": "Publish Bylaws", "description": "Mark bylaws as current/published version", "module": "bylaws"},

    # ========== GALLERY MODULE (5) ==========
    {"code": "gallery.view", "name": "View Event Gallery", "description": "View event gallery photos", "module": "gallery"},
    {"code": "gallery.upload", "name": "Upload Photos", "description": "Upload photos to event galleries", "module": "gallery"},
    {"code": "gallery.edit", "name": "Edit Photos", "description": "Edit photo captions and details", "module": "gallery"},
    {"code": "gallery.delete", "name": "Delete Photos", "description": "Delete photos from galleries", "module": "gallery"},
    {"code": "gallery.moderate", "name": "Moderate Gallery Content", "description": "Approve/reject uploaded photos", "module": "gallery"},

    # ========== SETTINGS MODULE (6) ==========
    {"code": "settings.view", "name": "View Settings", "description": "View application settings", "module": "settings"},
    {"code": "settings.edit", "name": "Edit Settings", "description": "Edit application settings", "module": "settings"},
    {"code": "settings.email_templates", "name": "Manage Email Templates", "description": "Edit email templates and notifications", "module": "settings"},
    {"code": "settings.storage", "name": "Manage Storage", "description": "View and manage storage usage", "module": "settings"},
    {"code": "settings.backup", "name": "Backup & Restore", "description": "Create and restore database backups", "module": "settings"},
    {"code": "settings.logs", "name": "View System Logs", "description": "View application and audit logs", "module": "settings"},

    # ========== PERMISSIONS MODULE (4) ==========
    {"code": "permissions.view", "name": "View Permissions", "description": "View permission definitions and assignments", "module": "permissions"},
    {"code": "permissions.assign", "name": "Assign Permissions", "description": "Assign permissions to roles", "module": "permissions"},
    {"code": "permissions.manage_roles", "name": "Manage Roles", "description": "Create and manage user roles", "module": "permissions"},
    {"code": "permissions.audit", "name": "View Permission Audit Log", "description": "View permission change audit logs", "module": "permissions"},

    # ========== PAYMENT METHODS MODULE (5) ==========
    {"code": "payment_methods.view", "name": "View Payment Methods", "description": "View user payment methods (masked)", "module": "payment_methods"},
    {"code": "payment_methods.view_sensitive", "name": "View Sensitive Payment Details", "description": "View full Stripe payment method IDs (requires password)", "module": "payment_methods"},
    {"code": "payment_methods.create", "name": "Create Payment Methods", "description": "Add payment methods on behalf of users", "module": "payment_methods"},
    {"code": "payment_methods.delete", "name": "Delete Payment Methods", "description": "Remove user payment methods", "module": "payment_methods"},
    {"code": "payment_methods.set_default", "name": "Set Default Payment Method", "description": "Set default payment method for users", "module": "payment_methods"},

    # ========== REGISTRATION MODULE (2) ==========
    {"code": "registration.view", "name": "View Registration Settings", "description": "View registration form schema and settings", "module": "registration"},
    {"code": "registration.manage", "name": "Manage Registration Form", "description": "Edit registration form schema, steps, and fields", "module": "registration"},

    # ========== DIRECTORY MODULE (2) ==========
    {"code": "directory.view", "name": "View Directory Settings", "description": "View member directory field configuration", "module": "directory"},
    {"code": "directory.manage", "name": "Manage Directory Fields", "description": "Enable/disable directory fields shown in Profile and Directory pages", "module": "directory"},
]
|
||||
|
||||
# Default system roles that must exist
# Each (code, name, description) triple below is expanded into the dict shape
# consumed by seed_permissions(); every role is flagged is_system_role=True.
_SYSTEM_ROLE_DEFS = (
    ("guest", "Guest",
     "Default role for new registrations with no special permissions"),
    ("member", "Member",
     "Active paying members with access to member-only content"),
    ("finance", "Finance",
     "Financial management role with access to payments, subscriptions, and reports"),
    ("admin", "Admin",
     "Board members with full management access except RBAC"),
    ("superadmin", "Superadmin",
     "Full system access including RBAC management"),
)

DEFAULT_ROLES = [
    {
        "code": code,
        "name": name,
        "description": description,
        "is_system_role": True,
    }
    for code, name, description in _SYSTEM_ROLE_DEFS
]
|
||||
|
||||
# Default permission assignments for dynamic roles
# Each role's permission codes are declared in their own list so the intent
# of every tier is easy to audit; superadmin is derived from the complete
# PERMISSIONS catalogue so it never drifts out of date.
_MEMBER_PERMISSIONS = [
    # Members can view public content
    "events.view", "events.rsvps", "events.calendar_export",
    "newsletters.view", "bylaws.view", "gallery.view",
]

_FINANCE_PERMISSIONS = [
    # Finance role has financial permissions + some user viewing
    "users.view", "financials.view", "financials.create", "financials.edit",
    "financials.delete", "financials.export", "financials.payments",
    "subscriptions.view", "subscriptions.create", "subscriptions.edit",
    "subscriptions.cancel", "subscriptions.activate", "subscriptions.plans",
    "subscriptions.export",
    "donations.view", "donations.export",
    # Payment methods - finance can view sensitive details
    "payment_methods.view", "payment_methods.view_sensitive",
    "payment_methods.create", "payment_methods.delete", "payment_methods.set_default",
]

_ADMIN_PERMISSIONS = [
    # Admins have most permissions except RBAC management
    "users.view", "users.create", "users.edit", "users.status", "users.approve",
    "users.export", "users.import", "users.reset_password", "users.resend_verification",
    "users.invite",
    "events.view", "events.create", "events.edit", "events.delete", "events.publish",
    "events.attendance", "events.rsvps", "events.calendar_export",
    "subscriptions.view", "subscriptions.create", "subscriptions.edit",
    "subscriptions.cancel", "subscriptions.activate", "subscriptions.plans",
    "subscriptions.export",
    "donations.view", "donations.export",
    "financials.view", "financials.create", "financials.edit", "financials.delete",
    "financials.export", "financials.payments",
    "newsletters.view", "newsletters.create", "newsletters.edit", "newsletters.delete",
    "newsletters.send", "newsletters.subscribers",
    "bylaws.view", "bylaws.create", "bylaws.edit", "bylaws.delete", "bylaws.publish",
    "gallery.view", "gallery.upload", "gallery.edit", "gallery.delete", "gallery.moderate",
    "settings.view", "settings.edit", "settings.email_templates", "settings.storage",
    "settings.logs",
    # Payment methods - admin can manage but not view sensitive details
    "payment_methods.view", "payment_methods.create",
    "payment_methods.delete", "payment_methods.set_default",
    # Registration form management
    "registration.view", "registration.manage",
    # Directory configuration
    "directory.view", "directory.manage",
]

DEFAULT_ROLE_PERMISSIONS = {
    "guest": [],  # Guests have no permissions
    "member": _MEMBER_PERMISSIONS,
    "finance": _FINANCE_PERMISSIONS,
    "admin": _ADMIN_PERMISSIONS,
    # Superadmin gets ALL permissions
    "superadmin": [p["code"] for p in PERMISSIONS],
}
|
||||
|
||||
|
||||
def seed_permissions():
    """Seed permissions and assign them to dynamic roles.

    Destructive, idempotent seeder: wipes the role_permissions and
    permissions tables, ensures the DEFAULT_ROLES system roles exist,
    recreates every entry of PERMISSIONS, then writes the role→permission
    mappings declared in DEFAULT_ROLE_PERMISSIONS. Intended to be run as a
    standalone script; rolls back and re-raises on any failure.
    """
    db = SessionLocal()

    try:
        print("=" * 80)
        print("🌱 PERMISSION SEEDING FOR DYNAMIC RBAC SYSTEM")
        print("=" * 80)

        # Step 1: Clear existing permissions and role_permissions.
        # NOTE: this also removes any custom role-permission assignments made
        # after the last seeding run — they are rebuilt from the defaults only.
        print("\n📦 Clearing existing permissions and role assignments...")

        # Use raw SQL to ensure complete deletion (bypasses ORM cascade rules).
        # role_permissions is deleted first because it references permissions.
        from sqlalchemy import text
        db.execute(text("DELETE FROM role_permissions"))
        db.commit()
        db.execute(text("DELETE FROM permissions"))
        db.commit()

        # Verify they're cleared before inserting fresh rows.
        rp_count = db.execute(text("SELECT COUNT(*) FROM role_permissions")).scalar()
        p_count = db.execute(text("SELECT COUNT(*) FROM permissions")).scalar()

        print(f"✓ Cleared role-permission mappings (verified: {rp_count} remaining)")
        print(f"✓ Cleared permissions (verified: {p_count} remaining)")

        if rp_count > 0 or p_count > 0:
            # Abort rather than seed on top of stale rows.
            print(f"\n⚠️ WARNING: Tables not fully cleared! Stopping.")
            return

        # Step 2: Create default system roles (update in place if present).
        print(f"\n👤 Creating {len(DEFAULT_ROLES)} system roles...")
        role_map = {}

        for role_data in DEFAULT_ROLES:
            # Check if role already exists
            existing_role = db.query(Role).filter(Role.code == role_data["code"]).first()
            if existing_role:
                print(f" • {role_data['name']}: Already exists, updating...")
                existing_role.name = role_data["name"]
                existing_role.description = role_data["description"]
                existing_role.is_system_role = role_data["is_system_role"]
                role_map[role_data["code"]] = existing_role
            else:
                print(f" • {role_data['name']}: Creating...")
                role = Role(
                    code=role_data["code"],
                    name=role_data["name"],
                    description=role_data["description"],
                    is_system_role=role_data["is_system_role"]
                )
                db.add(role)
                role_map[role_data["code"]] = role

        db.commit()
        print(f"✓ Created/updated {len(DEFAULT_ROLES)} system roles")

        # Step 3: Create permissions from the PERMISSIONS catalogue.
        print(f"\n📝 Creating {len(PERMISSIONS)} permissions...")
        permission_map = {}  # Map code to permission object

        for perm_data in PERMISSIONS:
            permission = Permission(
                code=perm_data["code"],
                name=perm_data["name"],
                description=perm_data["description"],
                module=perm_data["module"]
            )
            db.add(permission)
            permission_map[perm_data["code"]] = permission

        db.commit()
        print(f"✓ Created {len(PERMISSIONS)} permissions")

        # Step 4: Verify roles exist. role_map is rebuilt from a fresh query
        # so every Role carries its database-assigned id.
        print("\n🔍 Verifying dynamic roles...")
        roles = db.query(Role).all()
        role_map = {role.code: role for role in roles}
        print(f"✓ Found {len(roles)} roles: {', '.join(role_map.keys())}")

        # Step 5: Assign permissions to roles.
        print("\n🔐 Assigning permissions to roles...")

        from models import UserRole  # Import for enum mapping

        # Enum mapping for backward compatibility: RolePermission still
        # stores the legacy enum alongside the dynamic role_id.
        role_enum_map = {
            'guest': UserRole.guest,
            'member': UserRole.member,
            'admin': UserRole.admin,
            'superadmin': UserRole.superadmin,
            'finance': UserRole.finance  # Finance has its own enum value
        }

        total_assigned = 0
        for role_code, permission_codes in DEFAULT_ROLE_PERMISSIONS.items():
            if role_code not in role_map:
                print(f" ⚠️ Warning: Role '{role_code}' not found in database, skipping")
                continue

            role = role_map[role_code]
            role_enum = role_enum_map.get(role_code, UserRole.guest)

            for perm_code in permission_codes:
                if perm_code not in permission_map:
                    print(f" ⚠️ Warning: Permission '{perm_code}' not found")
                    continue

                role_permission = RolePermission(
                    role=role_enum,  # Legacy enum for backward compatibility
                    role_id=role.id,  # New dynamic role system
                    permission_id=permission_map[perm_code].id
                )
                db.add(role_permission)
                total_assigned += 1

            # Commit per role so each role's mappings land atomically.
            db.commit()
            print(f" ✓ {role.name}: Assigned {len(permission_codes)} permissions")

        # Step 6: Summary
        print("\n" + "=" * 80)
        print("📊 SEEDING SUMMARY")
        print("=" * 80)

        # Count permissions by module
        modules = {}
        for perm in PERMISSIONS:
            module = perm["module"]
            modules[module] = modules.get(module, 0) + 1

        print("\nPermissions by module:")
        for module, count in sorted(modules.items()):
            print(f" • {module.capitalize()}: {count} permissions")

        print(f"\nTotal system roles created: {len(DEFAULT_ROLES)}")
        print(f"Total permissions created: {len(PERMISSIONS)}")
        print(f"Total role-permission mappings: {total_assigned}")
        print("\n✅ Permission seeding completed successfully!")
        print("\nNext step: Restart backend server")
        print("=" * 80)

    except Exception as e:
        # Undo any partially-flushed work before surfacing the error.
        db.rollback()
        print(f"\n❌ Error seeding permissions: {str(e)}")
        import traceback
        traceback.print_exc()
        raise
    finally:
        db.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run directly as a script. Destructive: wipes and rebuilds the
    # permissions and role_permissions tables from the in-file defaults.
    seed_permissions()
|
||||
4652
server.py.bak
Normal file
4652
server.py.bak
Normal file
File diff suppressed because it is too large
Load Diff
624
status_transitions.py
Normal file
624
status_transitions.py
Normal file
@@ -0,0 +1,624 @@
|
||||
"""
|
||||
Membership Status Transition Logic
|
||||
|
||||
This module handles all user status transitions, validation, and automated rules.
|
||||
Ensures state machine integrity and prevents invalid status changes.
|
||||
"""
|
||||
|
||||
import logging
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional

from models import UserStatus, UserRole
|
||||
|
||||
# Configure logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Define valid status transitions (state machine)
# Keys are current statuses; the value lists every status an account may move
# to next. transition_user_status() rejects any pair not listed here, making
# this table the single source of truth for the membership lifecycle.
ALLOWED_TRANSITIONS: Dict[UserStatus, List[UserStatus]] = {
    UserStatus.pending_email: [
        UserStatus.pending_validation,  # Email verified (normal flow)
        UserStatus.pre_validated,  # Email verified + referred by member
        UserStatus.abandoned,  # Timeout without verification (optional)
    ],
    UserStatus.pending_validation: [
        UserStatus.pre_validated,  # Attended event
        UserStatus.abandoned,  # 90-day timeout without event
    ],
    UserStatus.pre_validated: [
        UserStatus.payment_pending,  # Admin validates application
        UserStatus.inactive,  # Admin rejects (rare)
    ],
    UserStatus.payment_pending: [
        UserStatus.active,  # Payment successful
        UserStatus.abandoned,  # Timeout without payment (optional)
    ],
    UserStatus.active: [
        UserStatus.expired,  # Subscription ended
        UserStatus.canceled,  # User/admin cancels
        UserStatus.inactive,  # Admin deactivates
    ],
    # Terminated states below are recoverable only through admin action or
    # an explicit rejoin/renew flow.
    UserStatus.inactive: [
        UserStatus.active,  # Admin reactivates
        UserStatus.payment_pending,  # Admin prompts for payment
    ],
    UserStatus.canceled: [
        UserStatus.payment_pending,  # User requests to rejoin
        UserStatus.active,  # Admin reactivates with subscription
    ],
    UserStatus.expired: [
        UserStatus.payment_pending,  # User chooses to renew
        UserStatus.active,  # Admin manually renews
    ],
    UserStatus.abandoned: [
        UserStatus.pending_email,  # Admin resets - resend verification
        UserStatus.pending_validation,  # Admin resets - manual email verify
        UserStatus.payment_pending,  # Admin resets - bypass requirements
    ],
}
|
||||
|
||||
# Define role mappings for each status
# Only fully active members carry the "member" role; every other lifecycle
# status is treated as a guest until activation (or after termination).
STATUS_ROLE_MAP: Dict[UserStatus, UserRole] = {
    status: (UserRole.member if status is UserStatus.active else UserRole.guest)
    for status in (
        UserStatus.pending_email,
        UserStatus.pending_validation,
        UserStatus.pre_validated,
        UserStatus.payment_pending,
        UserStatus.active,
        UserStatus.inactive,
        UserStatus.canceled,
        UserStatus.expired,
        UserStatus.abandoned,
    )
}
|
||||
|
||||
# Define newsletter subscription rules for each status
# Users in any of these statuses are kept subscribed; transitioning into a
# status outside this set flips newsletter_subscribed off (and vice versa) —
# see transition_user_status().
NEWSLETTER_SUBSCRIBED_STATUSES = {
    UserStatus.pending_validation,
    UserStatus.pre_validated,
    UserStatus.payment_pending,
    UserStatus.active,
}
|
||||
|
||||
|
||||
class StatusTransitionError(Exception):
    """Signals that a requested status change violates the state machine.

    Raised by transition_user_status() when the (current, target) pair is
    not present in ALLOWED_TRANSITIONS; the message lists the legal targets.
    """
|
||||
|
||||
|
||||
def is_transition_allowed(from_status: UserStatus, to_status: UserStatus) -> bool:
    """Check whether the state machine permits moving between two statuses.

    Args:
        from_status: Current user status.
        to_status: Target user status.

    Returns:
        True when `to_status` is a legal successor of `from_status`;
        False otherwise (including when `from_status` is unknown, which is
        also logged as a warning).
    """
    allowed = ALLOWED_TRANSITIONS.get(from_status)
    if allowed is None:
        logger.warning(f"Unknown source status: {from_status}")
        return False

    return to_status in allowed
|
||||
|
||||
|
||||
def get_allowed_transitions(current_status: UserStatus) -> List[UserStatus]:
    """Return the legal next statuses for `current_status`.

    Args:
        current_status: Current user status.

    Returns:
        The list of allowed target statuses; empty for unknown statuses.
    """
    try:
        return ALLOWED_TRANSITIONS[current_status]
    except KeyError:
        # Unknown status: no transitions rather than an error.
        return []
|
||||
|
||||
|
||||
def get_role_for_status(status: UserStatus) -> UserRole:
    """Look up the role a user should hold while in `status`.

    Args:
        status: User status.

    Returns:
        The mapped UserRole; unknown statuses default to guest.
    """
    mapped_role = STATUS_ROLE_MAP.get(status)
    return mapped_role if mapped_role is not None else UserRole.guest
|
||||
|
||||
|
||||
def should_subscribe_newsletter(status: UserStatus) -> bool:
    """Decide whether a user in `status` should receive the newsletter.

    Args:
        status: User status.

    Returns:
        True when the status is one of NEWSLETTER_SUBSCRIBED_STATUSES.
    """
    subscribed = status in NEWSLETTER_SUBSCRIBED_STATUSES
    return subscribed
|
||||
|
||||
|
||||
def transition_user_status(
    user,
    new_status: UserStatus,
    reason: Optional[str] = None,
    admin_id: Optional[str] = None,
    db_session=None,
    send_notification: bool = True
) -> Dict[str, Any]:
    """
    Transition a user to a new status with validation and side effects.

    Validates the move against ALLOWED_TRANSITIONS, then mutates the user in
    place: status, role (via STATUS_ROLE_MAP), newsletter subscription (via
    NEWSLETTER_SUBSCRIBED_STATUSES), and updated_at. Commits when a session
    is supplied. No email is sent here — `notification_needed` in the result
    tells the caller whether to send one.

    Note: the return annotation was `Dict[str, any]` (the builtin function,
    not a type); fixed to `Dict[str, Any]`.

    Args:
        user: User object (SQLAlchemy model instance); mutated in place
        new_status: Target status to transition to
        reason: Optional reason for the transition
        admin_id: Optional admin user ID if transition is manual
        db_session: SQLAlchemy database session; committed when provided
        send_notification: Whether the caller should send an email
            notification afterwards (default True)

    Returns:
        Dictionary with transition details:
        {
            'success': bool,
            'old_status': str,
            'new_status': str,
            'role_changed': bool,
            'newsletter_changed': bool,
            'message': str,
            ... plus old/new role, old/new newsletter flag,
            'notification_needed', 'reason', 'admin_id'
        }

    Raises:
        StatusTransitionError: If transition is not allowed (user is left
            unmodified in that case).
    """
    old_status = user.status
    old_role = user.role
    old_newsletter = user.newsletter_subscribed

    # Validate before mutating anything so a rejected call leaves the user
    # completely untouched.
    if not is_transition_allowed(old_status, new_status):
        allowed_names = [s.value for s in get_allowed_transitions(old_status)]
        error_msg = (
            f"Invalid status transition: {old_status.value} → {new_status.value}. "
            f"Allowed transitions from {old_status.value}: {allowed_names}"
        )
        logger.error(error_msg)
        raise StatusTransitionError(error_msg)

    # Update status
    user.status = new_status

    # Update role based on new status
    new_role = get_role_for_status(new_status)
    role_changed = new_role != old_role
    if role_changed:
        user.role = new_role

    # Update newsletter subscription
    should_subscribe = should_subscribe_newsletter(new_status)
    newsletter_changed = should_subscribe != old_newsletter
    if newsletter_changed:
        user.newsletter_subscribed = should_subscribe

    # Update timestamp (timezone-aware UTC)
    user.updated_at = datetime.now(timezone.utc)

    # Log the transition for auditability
    logger.info(
        f"Status transition: user_id={user.id}, "
        f"{old_status.value} → {new_status.value}, "
        f"reason={reason}, admin_id={admin_id}"
    )

    # Commit to database if session provided
    if db_session:
        db_session.commit()

    # Build result. Email sending is the caller's responsibility;
    # 'notification_needed' simply echoes the send_notification flag.
    result = {
        'success': True,
        'old_status': old_status.value,
        'new_status': new_status.value,
        'old_role': old_role.value,
        'new_role': new_role.value,
        'role_changed': role_changed,
        'old_newsletter': old_newsletter,
        'new_newsletter': should_subscribe,
        'newsletter_changed': newsletter_changed,
        'message': f'Successfully transitioned from {old_status.value} to {new_status.value}',
        'notification_needed': send_notification,
        'reason': reason,
        'admin_id': admin_id
    }

    logger.info(f"Transition result: {result}")
    return result
|
||||
|
||||
|
||||
def get_status_metadata(status: UserStatus) -> Dict[str, Any]:
    """
    Get metadata about a status including permissions and properties.

    Note: the return annotation was `Dict[str, any]` (the builtin function,
    not a type); fixed to `Dict[str, Any]`.

    Args:
        status: User status

    Returns:
        Dictionary with status metadata: the derived role, newsletter flag,
        allowed next statuses, and lifecycle flags (can_login,
        has_member_access, is_pending, is_terminated).
    """
    return {
        'status': status.value,
        'role': get_role_for_status(status).value,
        'newsletter_subscribed': should_subscribe_newsletter(status),
        'allowed_transitions': [s.value for s in get_allowed_transitions(status)],
        # Login is blocked until email verification, and for abandoned accounts.
        'can_login': status not in {UserStatus.pending_email, UserStatus.abandoned},
        'has_member_access': status == UserStatus.active,
        'is_pending': status in {
            UserStatus.pending_email,
            UserStatus.pending_validation,
            UserStatus.pre_validated,
            UserStatus.payment_pending
        },
        'is_terminated': status in {
            UserStatus.canceled,
            UserStatus.expired,
            UserStatus.abandoned,
            UserStatus.inactive
        }
    }
|
||||
|
||||
|
||||
# Helper functions for common transitions
|
||||
|
||||
def verify_email(user, db_session=None, is_referred: bool = False):
    """Advance a user after successful email verification.

    Referred users skip straight to pre_validated; everyone else goes to
    pending_validation and must attend an event.

    Args:
        user: User object.
        db_session: Optional database session; committed when provided.
        is_referred: Whether the user was referred by a member.

    Returns:
        Transition result dict from transition_user_status().
    """
    if is_referred:
        target_status = UserStatus.pre_validated
        note = "Email verified (referred by member)"
    else:
        target_status = UserStatus.pending_validation
        note = "Email verified"

    return transition_user_status(
        user=user,
        new_status=target_status,
        reason=note,
        db_session=db_session,
        send_notification=True,
    )
|
||||
|
||||
|
||||
def mark_event_attendance(user, admin_id: str, db_session=None):
    """Move a user to pre_validated after an admin records event attendance.

    Args:
        user: User object whose status is updated.
        admin_id: ID of the admin marking attendance.
        db_session: Optional database session; committed when provided.

    Returns:
        Transition result dict from transition_user_status().
    """
    # No immediate email for attendance marking.
    return transition_user_status(
        user=user,
        new_status=UserStatus.pre_validated,
        reason="Attended event",
        admin_id=admin_id,
        db_session=db_session,
        send_notification=False,
    )
|
||||
|
||||
|
||||
def validate_application(user, admin_id: str, db_session=None):
    """Admin validates (formerly "approves") an application.

    Moves the user to payment_pending so they can complete payment.

    Args:
        user: User object whose application is validated.
        admin_id: ID of the admin validating the application.
        db_session: Optional database session; committed when provided.

    Returns:
        Transition result dict from transition_user_status().
    """
    # Notification on: caller should follow up with payment instructions.
    return transition_user_status(
        user=user,
        new_status=UserStatus.payment_pending,
        reason="Application validated by admin",
        admin_id=admin_id,
        db_session=db_session,
        send_notification=True,
    )
|
||||
|
||||
|
||||
def activate_membership(user, admin_id: Optional[str] = None, db_session=None):
    """Activate a membership after payment or manual admin action.

    Args:
        user: User object to activate.
        admin_id: Admin's ID for a manual activation; None means the
            activation came from a successful payment.
        db_session: Optional database session; committed when provided.

    Returns:
        Transition result dict from transition_user_status().
    """
    # The recorded reason distinguishes payment-driven from manual activation.
    if admin_id:
        activation_reason = "Manually activated by admin"
    else:
        activation_reason = "Payment successful"

    # Notification on: caller should follow up with the welcome email.
    return transition_user_status(
        user=user,
        new_status=UserStatus.active,
        reason=activation_reason,
        admin_id=admin_id,
        db_session=db_session,
        send_notification=True,
    )
|
||||
|
||||
|
||||
def cancel_membership(user, admin_id: Optional[str] = None, reason: Optional[str] = None, db_session=None):
    """
    Cancel membership.

    Fix: `reason` had the annotation `str = None`; None is a legal (and the
    default) value, so it is `Optional[str]`.

    Args:
        user: User object
        admin_id: Optional ID of admin (if admin canceled)
        reason: Optional cancellation reason; when omitted, a default is
            derived from whether an admin initiated the cancellation
        db_session: Database session; committed when provided

    Returns:
        Transition result dict
    """
    cancel_reason = reason or ("Canceled by admin" if admin_id else "Canceled by user")
    return transition_user_status(
        user=user,
        new_status=UserStatus.canceled,
        reason=cancel_reason,
        admin_id=admin_id,
        db_session=db_session,
        send_notification=True  # Send cancellation confirmation
    )
|
||||
|
||||
|
||||
def expire_membership(user, db_session=None):
    """Mark a membership as expired once its subscription has ended.

    Args:
        user: User object to expire.
        db_session: Optional database session; committed when provided.

    Returns:
        Transition result dict from transition_user_status().
    """
    # Notification on: caller should follow up with a renewal prompt email.
    return transition_user_status(
        user=user,
        new_status=UserStatus.expired,
        reason="Subscription ended",
        db_session=db_session,
        send_notification=True,
    )
|
||||
|
||||
|
||||
def abandon_application(user, reason: str, db_session=None):
    """Flag an application as abandoned after a timeout.

    Args:
        user: User object to abandon.
        reason: Why the application timed out
            (e.g. "Email verification timeout").
        db_session: Optional database session; committed when provided.

    Returns:
        Transition result dict from transition_user_status().
    """
    # Notification on: caller should send the "incomplete application" notice.
    return transition_user_status(
        user=user,
        new_status=UserStatus.abandoned,
        reason=reason,
        db_session=db_session,
        send_notification=True,
    )
|
||||
|
||||
|
||||
def reactivate_user(user, target_status: UserStatus, admin_id: str, reason: Optional[str] = None, db_session=None):
    """
    Reactivate user from terminated status (admin action).

    Fix: `reason` had the annotation `str = None`; None is a legal (and the
    default) value, so it is `Optional[str]`.

    Args:
        user: User object
        target_status: Status to transition to (must be reachable from the
            user's current status per ALLOWED_TRANSITIONS)
        admin_id: ID of admin performing reactivation
        reason: Optional reason for reactivation; a default mentioning the
            target status is generated when omitted
        db_session: Database session; committed when provided

    Returns:
        Transition result dict

    Raises:
        StatusTransitionError: If target_status is not reachable from the
            user's current status.
    """
    reactivation_reason = reason or f"Reactivated by admin to {target_status.value}"
    return transition_user_status(
        user=user,
        new_status=target_status,
        reason=reactivation_reason,
        admin_id=admin_id,
        db_session=db_session,
        send_notification=True
    )
|
||||
|
||||
|
||||
# Background job functions (to be called by scheduler)
|
||||
|
||||
def check_pending_email_timeouts(db_session, timeout_days: int = 30):
    """
    Check for users in pending_email status past timeout and transition to abandoned.

    This should be run as a daily background job.

    Args:
        db_session: Database session
        timeout_days: Number of days before abandonment (0 = disabled)

    Returns:
        Number of users transitioned
    """
    if timeout_days <= 0:
        return 0

    from datetime import timedelta
    from models import User

    cutoff_date = datetime.now(timezone.utc) - timedelta(days=timeout_days)

    # Find users in pending_email status created before cutoff.
    # `.is_(False)` replaces the `== False` equality flagged by linters
    # (E712); it renders as an IS comparison and selects the same rows.
    timeout_users = db_session.query(User).filter(
        User.status == UserStatus.pending_email,
        User.created_at < cutoff_date,
        User.email_verified.is_(False)
    ).all()

    count = 0
    for user in timeout_users:
        try:
            abandon_application(
                user=user,
                reason=f"Email verification timeout ({timeout_days} days)",
                db_session=db_session
            )
            count += 1
            logger.info(f"Abandoned user {user.id} due to email verification timeout")
        except Exception as e:
            # One bad row shouldn't stop the whole sweep; log and continue.
            logger.error(f"Error abandoning user {user.id}: {str(e)}")

    return count
|
||||
|
||||
|
||||
def check_event_attendance_timeouts(db_session, timeout_days: int = 90):
    """Abandon users stuck in pending_validation past the attendance window.

    Intended to run as a daily background job.

    Args:
        db_session: Database session.
        timeout_days: Days before abandonment (default 90 per policy).

    Returns:
        Number of users transitioned to abandoned.
    """
    from datetime import timedelta
    from models import User

    cutoff = datetime.now(timezone.utc) - timedelta(days=timeout_days)

    # updated_at marks when the user entered this status, so it — not
    # created_at — drives the timeout clock.
    stale_users = db_session.query(User).filter(
        User.status == UserStatus.pending_validation,
        User.updated_at < cutoff
    ).all()

    abandoned = 0
    for user in stale_users:
        try:
            abandon_application(
                user=user,
                reason=f"Event attendance timeout ({timeout_days} days)",
                db_session=db_session
            )
        except Exception as e:
            # Log and keep sweeping the remaining users.
            logger.error(f"Error abandoning user {user.id}: {str(e)}")
            continue
        abandoned += 1
        logger.info(f"Abandoned user {user.id} due to event attendance timeout")

    return abandoned
|
||||
|
||||
|
||||
def check_payment_timeouts(db_session, timeout_days: int = 0):
    """Abandon users stuck in payment_pending past the timeout.

    Intended to run as a daily background job. The default of 0 disables
    the sweep entirely (recommended), so nothing happens unless a positive
    timeout is configured.

    Args:
        db_session: Database session.
        timeout_days: Days before abandonment; 0 or negative disables.

    Returns:
        Number of users transitioned to abandoned.
    """
    if timeout_days <= 0:
        return 0  # Disabled by default

    from datetime import timedelta
    from models import User

    cutoff = datetime.now(timezone.utc) - timedelta(days=timeout_days)

    stale_users = db_session.query(User).filter(
        User.status == UserStatus.payment_pending,
        User.updated_at < cutoff
    ).all()

    abandoned = 0
    for user in stale_users:
        try:
            abandon_application(
                user=user,
                reason=f"Payment timeout ({timeout_days} days)",
                db_session=db_session
            )
        except Exception as e:
            # Log and keep sweeping the remaining users.
            logger.error(f"Error abandoning user {user.id}: {str(e)}")
            continue
        abandoned += 1
        logger.info(f"Abandoned user {user.id} due to payment timeout")

    return abandoned
|
||||
|
||||
|
||||
def check_subscription_expirations(db_session):
    """Expire active users whose subscription end_date has passed.

    Intended to run as a daily background job.

    Args:
        db_session: Database session.

    Returns:
        Number of users transitioned to expired.
    """
    from models import User, Subscription
    from sqlalchemy import and_

    today = datetime.now(timezone.utc).date()

    # NOTE(review): a user with several lapsed subscriptions appears once per
    # row; the second attempt fails the transition check and is caught below —
    # presumably acceptable, but worth confirming.
    rows = db_session.query(User, Subscription).join(
        Subscription, User.id == Subscription.user_id
    ).filter(
        and_(
            User.status == UserStatus.active,
            Subscription.end_date < today
        )
    ).all()

    expired_count = 0
    for user, subscription in rows:
        try:
            expire_membership(user=user, db_session=db_session)
        except Exception as e:
            # Log and keep sweeping the remaining rows.
            logger.error(f"Error expiring user {user.id}: {str(e)}")
            continue
        expired_count += 1
        logger.info(f"Expired user {user.id} - subscription ended {subscription.end_date}")

    return expired_count
|
||||
115
update_permissions.py
Normal file
115
update_permissions.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""
|
||||
Script to update admin endpoints with permission checks
|
||||
Replaces get_current_admin_user with require_permission calls
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
# Mapping of endpoint patterns to permissions
# Ordered rules: update_server_file() scans these top-to-bottom and the first
# regex matching a "METHOD /route" string wins, so more specific patterns
# must precede broader ones (dict preserves insertion order).
_ENDPOINT_PERMISSION_RULES = (
    # Calendar
    (r'POST /admin/calendar/sync', 'events.edit'),
    (r'DELETE /admin/calendar/unsync', 'events.edit'),

    # Event Gallery
    (r'POST /admin/events/\{event_id\}/gallery', 'gallery.upload'),
    (r'DELETE /admin/event-gallery', 'gallery.delete'),
    (r'PUT /admin/event-gallery', 'gallery.edit'),

    # Storage
    (r'GET /admin/storage/usage', 'settings.storage'),
    (r'GET /admin/storage/breakdown', 'settings.storage'),

    # User Management (remaining)
    (r'PUT /admin/users/\{user_id\}/reset-password', 'users.reset_password'),
    (r'POST /admin/users/\{user_id\}/resend-verification', 'users.resend_verification'),

    # Events
    (r'POST /admin/events(?!/)', 'events.create'),  # Not followed by /
    (r'PUT /admin/events/\{event_id\}', 'events.edit'),
    (r'GET /admin/events/\{event_id\}/rsvps', 'events.rsvps'),
    (r'PUT /admin/events/\{event_id\}/attendance', 'events.attendance'),
    (r'GET /admin/events(?!/)', 'events.view'),  # Not followed by /
    (r'DELETE /admin/events', 'events.delete'),

    # Subscriptions
    (r'GET /admin/subscriptions/plans(?!/)', 'subscriptions.view'),
    (r'GET /admin/subscriptions/plans/\{plan_id\}', 'subscriptions.view'),
    (r'POST /admin/subscriptions/plans', 'subscriptions.plans'),
    (r'PUT /admin/subscriptions/plans', 'subscriptions.plans'),
    (r'DELETE /admin/subscriptions/plans', 'subscriptions.plans'),
    (r'GET /admin/subscriptions/stats', 'subscriptions.view'),
    (r'GET /admin/subscriptions(?!/)', 'subscriptions.view'),
    (r'PUT /admin/subscriptions/\{subscription_id\}', 'subscriptions.edit'),
    (r'POST /admin/subscriptions/\{subscription_id\}/cancel', 'subscriptions.cancel'),

    # Newsletters
    (r'POST /admin/newsletters', 'newsletters.create'),
    (r'PUT /admin/newsletters', 'newsletters.edit'),
    (r'DELETE /admin/newsletters', 'newsletters.delete'),

    # Financials
    (r'POST /admin/financials', 'financials.create'),
    (r'PUT /admin/financials', 'financials.edit'),
    (r'DELETE /admin/financials', 'financials.delete'),

    # Bylaws
    (r'POST /admin/bylaws', 'bylaws.create'),
    (r'PUT /admin/bylaws', 'bylaws.edit'),
    (r'DELETE /admin/bylaws', 'bylaws.delete'),
)

ENDPOINT_PERMISSIONS = dict(_ENDPOINT_PERMISSION_RULES)
|
||||
|
||||
def update_server_file():
    """Read server.py, update permissions, write back"""

    with open('server.py', 'r') as src:
        source_text = src.read()

    # Number of endpoints whose dependency was rewritten
    updated_count = 0

    # Matches an @api_router decorator for an /admin/... route followed
    # (lazily, across lines via DOTALL) by a handler signature that still
    # uses the generic get_current_admin_user dependency.
    admin_dep_pattern = (
        r'(@api_router\.(get|post|put|delete)\("(/admin/[^"]+)"\)[^@]+?)'
        r'current_user: User = Depends\(get_current_admin_user\)'
    )

    def swap_dependency(match):
        nonlocal updated_count
        matched_text = match.group(0)
        endpoint_key = f'{match.group(2).upper()} {match.group(3)}'

        # First mapping entry whose pattern matches this "<METHOD> <route>" key,
        # relying on ENDPOINT_PERMISSIONS insertion order (specific before generic).
        permission = next(
            (perm for key_pattern, perm in ENDPOINT_PERMISSIONS.items()
             if re.search(key_pattern, endpoint_key)),
            None,
        )

        if permission is None:
            print(f'⚠ No permission mapping for: {endpoint_key}')
            return matched_text

        updated_count += 1
        print(f'✓ Updated {endpoint_key} → {permission}')
        return matched_text.replace(
            'current_user: User = Depends(get_current_admin_user)',
            f'current_user: User = Depends(require_permission("{permission}"))'
        )

    rewritten = re.sub(admin_dep_pattern, swap_dependency, source_text, flags=re.DOTALL)

    # Only touch the file when at least one endpoint actually changed.
    if updated_count:
        with open('server.py', 'w') as dst:
            dst.write(rewritten)
        print(f'\n✅ Updated {updated_count} endpoints with permission checks')
    else:
        print('\n⚠ No changes made')

    return updated_count
|
||||
|
||||
if __name__ == '__main__':
    # One-shot migration helper: rewrite server.py in place and report.
    print('🔧 Updating admin endpoints with permission checks...\n')
    total_updated = update_server_file()
    print(f'\nDone! Updated {total_updated} endpoints.')
|
||||
113
verify_admin_account.py
Normal file
113
verify_admin_account.py
Normal file
@@ -0,0 +1,113 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Script to verify admin@loaf.org account configuration after RBAC migration
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Add parent directory to path to import models
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
from models import User, Role, Permission, RolePermission
|
||||
from database import DATABASE_URL
|
||||
|
||||
# Load environment variables
# (must run before create_engine in case DATABASE_URL is derived from .env)
load_dotenv()

# Create database engine and session
# NOTE: a single module-level session is shared by the whole script;
# it is closed in the __main__ finally block.
engine = create_engine(DATABASE_URL)
Session = sessionmaker(bind=engine)
db = Session()
|
||||
|
||||
def verify_admin_account():
    """Print a diagnostic report for the admin@loaf.org account.

    Checks, in order:
      1. The user exists; show basic profile/status fields.
      2. The legacy role enum still stored on the user.
      3. The new dynamic Role row (post-RBAC migration), if role_id is set.
      4. Permissions attached to the role (dynamic, or legacy enum fallback).
      5. A final admin-access verdict from both role systems.

    Read-only: issues only SELECT queries via the module-level `db` session.
    """
    print("=" * 80)
    print("VERIFYING admin@loaf.org ACCOUNT")
    print("=" * 80)

    # Find the user
    user = db.query(User).filter(User.email == "admin@loaf.org").first()

    if not user:
        print("\n❌ ERROR: User 'admin@loaf.org' not found in database!")
        return

    print(f"\n✅ User found: {user.first_name} {user.last_name}")
    print(f"   Email: {user.email}")
    print(f"   Status: {user.status}")
    print(f"   Email Verified: {user.email_verified}")

    # Check legacy role enum
    print(f"\n📋 Legacy Role (enum): {user.role.value if user.role else 'None'}")

    # Check new dynamic role
    if user.role_id:
        role = db.query(Role).filter(Role.id == user.role_id).first()
        if role:
            print(f"✅ Dynamic Role: {role.name} (code: {role.code})")
            print(f"   Role ID: {role.id}")
            print(f"   Is System Role: {role.is_system_role}")
        else:
            # Dangling foreign key: role_id points at a missing Role row
            print(f"❌ ERROR: role_id set to {user.role_id} but role not found!")
    else:
        print("⚠️  WARNING: No dynamic role_id set")

    # Check permissions
    print("\n🔐 Checking Permissions:")

    # Get all permissions for this role
    if user.role_id:
        role_perms = db.query(RolePermission).filter(
            RolePermission.role_id == user.role_id
        ).all()

        print(f"   Total permissions assigned to role: {len(role_perms)}")

        if len(role_perms) > 0:
            print("\n   Sample permissions:")
            for rp in role_perms[:10]:  # Show first 10
                perm = db.query(Permission).filter(Permission.id == rp.permission_id).first()
                if perm:
                    print(f"      - {perm.code}: {perm.name}")
            if len(role_perms) > 10:
                print(f"      ... and {len(role_perms) - 10} more")
        else:
            print("   ⚠️  WARNING: No permissions assigned to this role!")
    else:
        # Check legacy role permissions
        # (removed unused `from auth import UserRole` import — nothing here used it)
        # NOTE(review): assumes RolePermission has a legacy `role` enum column
        # alongside role_id — confirm against models.py
        role_enum = user.role
        legacy_perms = db.query(RolePermission).filter(
            RolePermission.role == role_enum
        ).all()
        print(f"   Legacy permissions (via enum): {len(legacy_perms)}")

    # Check if user should have access
    print("\n🎯 Access Check:")
    if user.role and user.role.value in ['admin', 'superadmin']:
        print("   ✅ User should have admin access (based on legacy enum)")
    else:
        print("   ❌ User does NOT have admin access (based on legacy enum)")

    if user.role_id:
        role = db.query(Role).filter(Role.id == user.role_id).first()
        if role and role.code in ['admin', 'superadmin']:
            print("   ✅ User should have admin access (based on dynamic role)")
        else:
            print("   ❌ User does NOT have admin access (based on dynamic role)")

    print("\n" + "=" * 80)
    print("VERIFICATION COMPLETE")
    print("=" * 80)
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
verify_admin_account()
|
||||
except Exception as e:
|
||||
print(f"\n❌ ERROR: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
finally:
|
||||
db.close()
|
||||
531
wordpress_parser.py
Normal file
531
wordpress_parser.py
Normal file
@@ -0,0 +1,531 @@
|
||||
"""
|
||||
WordPress CSV Parser Module
|
||||
|
||||
This module provides utilities for parsing WordPress user export CSV files
|
||||
and transforming them into LOAF platform-compatible data structures.
|
||||
|
||||
Key Features:
|
||||
- Parse PHP serialized data (WordPress capabilities)
|
||||
- Map WordPress roles to LOAF roles and statuses
|
||||
- Validate and standardize user data (DOB, phone numbers)
|
||||
- Generate smart status suggestions based on approval and subscription data
|
||||
- Comprehensive data quality analysis and error reporting
|
||||
|
||||
Author: Claude Code
|
||||
Date: 2025-12-24
|
||||
"""
|
||||
|
||||
import csv
|
||||
import re
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
import phpserialize
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# WordPress Role Mapping Configuration
|
||||
# ============================================================================
|
||||
|
||||
# Maps a WordPress role name to a (loaf_role, suggested_status) pair.
# A status of None means "not fixed by the role — derive it from the
# approval/subscription data instead" (see suggest_status()).
ROLE_MAPPING = {
    # WordPress admin roles → LOAF admin roles (auto-active)
    'administrator': ('superadmin', 'active'),
    'loaf_admin': ('admin', 'active'),
    'loaf_treasure': ('finance', 'active'),
    'loaf_communication': ('admin', 'active'),

    # WordPress member roles → LOAF member role (status from approval)
    'pms_subscription_plan_63': ('member', None),  # Status determined by approval
    'registered': ('guest', None),  # Default WordPress role

    # Fallback for unknown roles
    '__default__': ('guest', None)
}

# Role priority order (higher index = higher priority).
# Used by map_wordpress_role() to pick the dominant role when a user
# carries several WordPress roles at once; roles absent from this list
# rank below everything in it.
ROLE_PRIORITY = [
    'registered',
    'pms_subscription_plan_63',
    'loaf_communication',
    'loaf_treasure',
    'loaf_admin',
    'administrator'
]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# PHP Serialization Parsing
|
||||
# ============================================================================
|
||||
|
||||
def parse_php_serialized(data: str) -> List[str]:
    """
    Parse a WordPress PHP-serialized capabilities string into role names.

    WordPress stores user capabilities as serialized PHP arrays such as:
        a:1:{s:10:"registered";b:1;}
        a:2:{s:10:"registered";b:1;s:24:"pms_subscription_plan_63";b:1;}

    Args:
        data: PHP serialized string (may be empty/NaN for blank cells)

    Returns:
        List of role names whose value is truthy, e.g.
        ['registered', 'pms_subscription_plan_63']; [] on empty,
        unparseable, or non-dict input.
    """
    # Blank cells arrive as '' or pandas NaN — nothing to parse.
    if not data or pd.isna(data):
        return []

    try:
        decoded = phpserialize.loads(data.encode('utf-8'))

        if not isinstance(decoded, dict):
            return []

        # Keep only enabled capabilities; keys come back as bytes.
        role_names = []
        for raw_key, enabled in decoded.items():
            if not enabled:
                continue
            role_names.append(raw_key.decode('utf-8') if isinstance(raw_key, bytes) else raw_key)
        return role_names
    except Exception as e:
        # Best-effort parser: log and treat the row as having no roles.
        logger.warning(f"Failed to parse PHP serialized data: {data[:50]}... Error: {str(e)}")
        return []
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Role and Status Mapping
|
||||
# ============================================================================
|
||||
|
||||
def map_wordpress_role(wp_roles: List[str]) -> Tuple[str, Optional[str]]:
    """
    Map a user's WordPress roles to a LOAF role and suggested status.

    Priority logic:
    1. Any admin role wins → corresponding LOAF admin role, status 'active'
    2. Subscription role → 'member' (status decided later from approval)
    3. Otherwise → 'guest' (status decided later from approval)

    Args:
        wp_roles: WordPress role names for one user

    Returns:
        (loaf_role, suggested_status) where loaf_role is one of
        superadmin/admin/finance/member/guest and suggested_status is
        'active' for admin roles or None when the approval data decides.

    Examples:
        >>> map_wordpress_role(['loaf_admin'])
        ('admin', 'active')
        >>> map_wordpress_role(['pms_subscription_plan_63', 'registered'])
        ('member', None)
        >>> map_wordpress_role(['registered'])
        ('guest', None)
    """
    if not wp_roles:
        return ROLE_MAPPING['__default__']

    # Rank by ROLE_PRIORITY; roles not listed there sort below everything.
    ordered = sorted(
        wp_roles,
        key=lambda role: ROLE_PRIORITY.index(role) if role in ROLE_PRIORITY else -1
    )

    # The last element is the dominant role; unknown names fall back to guest.
    top_role = ordered[-1] if ordered else 'registered'
    return ROLE_MAPPING.get(top_role, ROLE_MAPPING['__default__'])
|
||||
|
||||
|
||||
def suggest_status(approval_status: str, has_subscription: bool, wordpress_role: str = 'guest') -> str:
    """
    Suggest a LOAF user status from WordPress approval/subscription data.

    Rules, in order:
    1. LOAF admin roles (superadmin/admin/finance) → always 'active'
    2. approved + subscription → 'active'
    3. approved, no subscription → 'pre_validated'
    4. pending → 'payment_pending'
    5. unapproved → 'inactive'
    6. anything else (empty/unknown) → 'pre_validated'

    Args:
        approval_status: WordPress approval status (approved, pending, ...)
        has_subscription: whether the user carries pms_subscription_plan_63
        wordpress_role: the mapped LOAF role (used for the admin short-circuit)

    Returns:
        One of: 'active', 'pre_validated', 'payment_pending', 'inactive'
    """
    # Admins bypass the approval workflow entirely.
    if wordpress_role in ('superadmin', 'admin', 'finance'):
        return 'active'

    # Normalize: tolerate None, stray whitespace, and mixed case.
    approval = (approval_status or '').lower().strip()

    if approval == 'approved':
        return 'active' if has_subscription else 'pre_validated'
    if approval == 'pending':
        return 'payment_pending'
    if approval == 'unapproved':
        return 'inactive'

    # Empty or unrecognized approval value.
    return 'pre_validated'
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Data Validation and Standardization
|
||||
# ============================================================================
|
||||
|
||||
def standardize_phone(phone: str) -> str:
    """
    Standardize a phone number by extracting digits only.

    Removes all non-digit characters:
        (713) 560-7850 → 7135607850
        713-725-8902   → 7137258902
        Empty/None     → 0000000000 (fallback sentinel)

    Also accepts numeric cells: pandas reads all-digit CSV columns as
    floats (e.g. 7135607850.0); whole-number floats are converted to int
    first so str() does not append a spurious ".0" digit.

    Args:
        phone: Phone number in any format (str, or float from pandas)

    Returns:
        10-digit phone number string, or '0000000000' if invalid

    Examples:
        >>> standardize_phone('(713) 560-7850')
        '7135607850'
        >>> standardize_phone(7135607850.0)
        '7135607850'
        >>> standardize_phone('')
        '0000000000'
    """
    # Blank cells arrive as '' or pandas NaN (NaN is also caught by pd.isna).
    if not phone or pd.isna(phone):
        return '0000000000'

    # Fix: pandas numeric columns yield floats like 7135607850.0 whose
    # str() form contains an extra '0' after the stripped dot, producing
    # 11 digits and a false rejection. Normalize to int before stringifying.
    if isinstance(phone, float) and phone.is_integer():
        phone = int(phone)

    # Extract all digits
    digits = re.sub(r'\D', '', str(phone))

    # Return 10 digits, strip a leading US country code, or fall back.
    if len(digits) == 10:
        return digits
    elif len(digits) == 11 and digits[0] == '1':
        # Remove leading 1 (US country code)
        return digits[1:]
    else:
        logger.warning(f"Invalid phone format: {phone} (extracted: {digits})")
        return '0000000000'
|
||||
|
||||
|
||||
def validate_dob(dob_str: str) -> Tuple[Optional[datetime], Optional[str]]:
    """
    Validate and parse a date of birth.

    Validation rules:
    - Must be in MM/DD/YYYY format
    - Year must be between 1900 and the current year
    - Cannot be in the future
    - Year 0000 (a known WordPress export data-quality issue) is reported
      with its own message

    Args:
        dob_str: Date of birth string in MM/DD/YYYY format

    Returns:
        (parsed_datetime, warning_message):
        - parsed_datetime: datetime if valid, else None
        - warning_message: descriptive error if invalid, else None

    Examples:
        >>> validate_dob('08/02/1962')
        (datetime.datetime(1962, 8, 2, 0, 0), None)
        >>> validate_dob('08/02/0000')
        (None, 'Invalid year: 0000 (data quality issue)')
        >>> validate_dob('08/02/2999')
        (None, 'Date is in the future: 2999')
    """
    # Blank cells arrive as '' or pandas NaN.
    if not dob_str or pd.isna(dob_str):
        return None, 'Missing date of birth'

    text = str(dob_str).strip()

    # Fix: datetime's minimum year is 1, so strptime raises ValueError for
    # year 0000 and the old `parsed.year == 0` check could never fire —
    # those rows were misreported as a generic format error. Detect the
    # 0000 year up front so the specific message is actually produced.
    if re.fullmatch(r'\d{1,2}/\d{1,2}/0000', text):
        return None, 'Invalid year: 0000 (data quality issue)'

    try:
        # Parse MM/DD/YYYY format
        parsed = datetime.strptime(text, '%m/%d/%Y')
    except ValueError:
        return None, f'Invalid date format: {dob_str} (expected MM/DD/YYYY)'

    # Validate year range
    if parsed.year < 1900:
        return None, f'Year too old: {parsed.year} (likely invalid)'
    if parsed.year > datetime.now().year:
        return None, f'Date is in the future: {parsed.year}'
    if parsed > datetime.now():
        # Same calendar year but a later date than today.
        return None, 'Date is in the future'

    return parsed, None
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# CSV Analysis and Preview Generation
|
||||
# ============================================================================
|
||||
|
||||
def analyze_csv(file_path: str, existing_emails: Optional[set] = None) -> Dict:
    """
    Analyze WordPress CSV file and generate preview data with status suggestions.

    This is the main entry point for CSV processing. It:
    1. Reads and parses the CSV file
    2. Validates each row and generates warnings
    3. Maps WordPress roles to LOAF roles
    4. Suggests status for each user
    5. Tracks data quality metrics
    6. Checks for duplicate emails (both within CSV and against existing database)
    7. Returns comprehensive analysis and preview data

    Args:
        file_path: Path to WordPress CSV export file
        existing_emails: Set of emails already in the database (optional)

    Returns:
        Dictionary containing:
        - total_rows: Total number of user rows
        - valid_rows: Number of rows without critical errors
        - warnings: Total warning count
        - errors: Total critical error count
        - preview_data: List of row dictionaries with suggestions
        - data_quality: Dictionary of data quality metrics
    """
    import pandas as pd

    # Read CSV with pandas
    df = pd.read_csv(file_path)

    total_rows = len(df)
    preview_data = []
    # Running counters surfaced in the result's data_quality section
    data_quality = {
        'invalid_dob': 0,
        'missing_phone': 0,
        'duplicate_email_csv': 0,
        'duplicate_email_db': 0,
        'unparseable_roles': 0,
        'missing_email': 0
    }

    # Track seen emails for CSV duplicate detection (email -> first row seen)
    seen_emails = {}

    # Convert existing_emails to set if provided
    if existing_emails is None:
        existing_emails = set()

    for idx, row in df.iterrows():
        # NOTE(review): 1-based row number assumes the default RangeIndex
        # from read_csv — confirm if the frame is ever re-indexed upstream
        row_num = idx + 1
        warnings = []
        errors = []

        # Extract and validate email
        email = str(row.get('user_email', '')).strip().lower()
        # 'nan' is the stringified pandas NaN for a blank cell
        if not email or email == 'nan':
            errors.append('Missing email address')
            data_quality['missing_email'] += 1
        else:
            # Check for duplicates within CSV
            if email in seen_emails:
                errors.append(f'Duplicate email in CSV (also in row {seen_emails[email]})')
                data_quality['duplicate_email_csv'] += 1
            # Check for duplicates in existing database
            elif email in existing_emails:
                errors.append(f'Email already exists in database')
                data_quality['duplicate_email_db'] += 1
            else:
                seen_emails[email] = row_num

        # Extract basic fields
        first_name = str(row.get('first_name', '')).strip()
        last_name = str(row.get('last_name', '')).strip()

        # Parse and validate DOB (warning only — row stays importable)
        dob_parsed, dob_warning = validate_dob(row.get('date_of_birth'))
        if dob_warning:
            warnings.append(dob_warning)
            data_quality['invalid_dob'] += 1

        # Standardize phone ('0000000000' is the invalid-input sentinel)
        phone = standardize_phone(row.get('cell_phone'))
        if phone == '0000000000':
            warnings.append('Missing or invalid phone number')
            data_quality['missing_phone'] += 1

        # Parse WordPress roles from the PHP-serialized capabilities column
        wp_capabilities = row.get('wp_capabilities', '')
        wp_roles = parse_php_serialized(wp_capabilities)
        # Non-empty input that parsed to nothing means the cell was garbled
        if not wp_roles and wp_capabilities:
            warnings.append('Could not parse WordPress roles')
            data_quality['unparseable_roles'] += 1

        # Map to LOAF role and status
        loaf_role, role_suggested_status = map_wordpress_role(wp_roles)

        # Determine if user has subscription
        has_subscription = 'pms_subscription_plan_63' in wp_roles

        # Get approval status
        approval_status = str(row.get('wppb_approval_status', '')).strip()

        # Suggest final status
        if role_suggested_status:
            # Admin roles have fixed status from role mapping
            suggested_status = role_suggested_status
        else:
            # Regular users get status from approval logic
            suggested_status = suggest_status(approval_status, has_subscription, loaf_role)

        # Build preview row
        preview_row = {
            'row_number': row_num,
            'email': email,
            'first_name': first_name,
            'last_name': last_name,
            'phone': phone,
            # NOTE(review): datetime.isoformat() includes the time component
            # ('1962-08-02T00:00:00'), not the bare date — confirm the
            # frontend tolerates this
            'date_of_birth': dob_parsed.isoformat() if dob_parsed else None,
            'wordpress_user_id': int(row.get('ID', 0)) if pd.notna(row.get('ID')) else None,
            'wordpress_registered': str(row.get('user_registered', '')),
            'wordpress_roles': wp_roles,
            'wordpress_approval_status': approval_status,
            'has_subscription': has_subscription,
            'suggested_role': loaf_role,
            'suggested_status': suggested_status,
            'warnings': warnings,
            'errors': errors,
            'newsletter_consent': str(row.get('newsletter_consent', '')).lower() == 'yes',
            'newsletter_checklist': str(row.get('newsletter_checklist', '')).lower() == 'yes'
        }

        preview_data.append(preview_row)

    # Calculate summary statistics
    valid_rows = sum(1 for row in preview_data if not row['errors'])
    total_warnings = sum(len(row['warnings']) for row in preview_data)
    total_errors = sum(len(row['errors']) for row in preview_data)

    return {
        'total_rows': total_rows,
        'valid_rows': valid_rows,
        'warnings': total_warnings,
        'errors': total_errors,
        'preview_data': preview_data,
        'data_quality': data_quality
    }
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Utility Functions
|
||||
# ============================================================================
|
||||
|
||||
def get_status_badge_color(status: str) -> str:
    """
    Return the Tailwind CSS badge classes for a user status.

    Args:
        status: User status string

    Returns:
        Tailwind CSS color class string; gray fallback for unknown statuses.
    """
    fallback = 'bg-gray-100 text-gray-800'
    status_classes = {
        'active': 'bg-green-100 text-green-800',
        'pre_validated': 'bg-blue-100 text-blue-800',
        'payment_pending': 'bg-yellow-100 text-yellow-800',
        'inactive': 'bg-gray-100 text-gray-800',
        'pending_email': 'bg-purple-100 text-purple-800',
        'awaiting_event': 'bg-indigo-100 text-indigo-800',
    }
    return status_classes.get(status, fallback)
|
||||
|
||||
|
||||
def format_preview_for_display(preview_data: List[Dict], page: int = 1, page_size: int = 50) -> Dict:
    """
    Slice preview data into one page for the frontend.

    Args:
        preview_data: Full preview data list
        page: Page number (1-indexed)
        page_size: Number of rows per page

    Returns:
        Dictionary with the requested page of rows plus pagination metadata.
    """
    row_count = len(preview_data)
    # Ceiling division via negation: ceil(a/b) == -(-a // b)
    page_count = -(-row_count // page_size)
    start = (page - 1) * page_size

    return {
        'page': page,
        'page_size': page_size,
        'total_pages': page_count,
        'total_rows': row_count,
        'rows': preview_data[start:start + page_size]
    }
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Module Initialization
|
||||
# ============================================================================
|
||||
|
||||
# Import pandas for CSV processing.
# Placed at the bottom of the module but still executed at import time,
# so the `pd` global is available to the functions above before any of
# them can be called. Fail fast with a clear hint if pandas is absent.
try:
    import pandas as pd
except ImportError:
    logger.error("pandas library not found. Please install: pip install pandas")
    raise

logger.info("WordPress parser module loaded successfully")
|
||||
Reference in New Issue
Block a user