diff --git a/.gitignore b/.gitignore index c66ba3a..65cc126 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,308 @@ .env -.venv \ No newline at end of file +.venv +# ============================================================================ +# Python Backend .gitignore +# For FastAPI + PostgreSQL + Cloudflare R2 + Stripe +# ============================================================================ + +# ===== Environment Variables ===== +.env +.env.* +!.env.example +.envrc + +# ===== Python ===== +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +*.manifest +*.spec + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff (if ever added): +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff (if ever added): +instance/ +.webassets-cache + +# Scrapy stuff (if ever added): +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +Pipfile.lock + +# poetry +poetry.lock + +# pdm +.pdm.toml + +# PEP 582 +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug 
symbols +cython_debug/ + +# ===== Database ===== +# SQLite (development) +*.db +*.sqlite +*.sqlite3 + +# PostgreSQL dumps +*.sql.gz +*.dump + +# Database backups +backups/ +*.backup + +# ===== Alembic Migrations ===== +# Keep migration files but ignore bytecode +alembic/__pycache__/ +alembic/versions/__pycache__/ +# Keep alembic.ini, env.py, and all migration files in alembic/versions/ + +# ===== IDE / Editors ===== +# VSCode +.vscode/ +*.code-workspace + +# PyCharm +.idea/ +*.iml +*.ipr +*.iws + +# Sublime Text +*.sublime-project +*.sublime-workspace + +# Vim +*.swp +*.swo +*~ +.netrwhist + +# Emacs +*~ +\#*\# +/.emacs.desktop +/.emacs.desktop.lock +*.elc + +# Eclipse +.project +.pydevproject +.settings/ + +# ===== Operating System ===== +# macOS +.DS_Store +.AppleDouble +.LSOverride +._* +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Windows +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db +*.stackdump +[Dd]esktop.ini +$RECYCLE.BIN/ +*.cab +*.msi +*.msix +*.msm +*.msp +*.lnk + +# Linux +.directory +.Trash-* +.nfs* + +# ===== Logs & Runtime ===== +*.log +logs/ +*.out +*.err +*.pid +*.seed +*.pid.lock + +# ===== Application-Specific ===== +# Uploaded files (R2 storage handles this) +uploads/ +temp_uploads/ +tmp/ +temporary/ + +# CSV imports +imports/*.csv +!imports/.gitkeep + +# Generated reports +reports/ +exports/ + +# Cache directories +.cache/ +cache/ + +# ===== Security & Secrets ===== +# API keys and secrets +secrets/ +*.pem +*.key +*.cert +*.crt +*.p12 +*.pfx + +# Stripe webhook secrets +stripe_*.txt + +# ===== Testing ===== +# Test databases +test.db +test_*.db + +# Test coverage +htmlcov/ +.coverage + +# ===== Miscellaneous ===== +# Backup files +*.bak +*.backup +*.old +*.orig + +# Compressed files +*.zip +*.tar.gz +*.rar + +# Temporary files +*.tmp +*.temp + +# Lock files +*.lock +!requirements.txt.lock + +# ===== Keep These ===== +# Keep these 
example/template files +!.env.example +!migrations/.gitkeep +!uploads/.gitkeep diff --git a/DATABASE_STATUS.md b/DATABASE_STATUS.md new file mode 100644 index 0000000..4085c20 --- /dev/null +++ b/DATABASE_STATUS.md @@ -0,0 +1,247 @@ +# Database Status - LOAF Membership Platform + +**Database:** `loaf_new` +**Host:** 10.9.23.11:54321 +**Last Updated:** 2026-01-03 +**Status:** ✅ Fully initialized with seed data + +--- + +## Database Summary + +### Tables (18 total) + +| Table Name | Status | Records | Purpose | +|------------|--------|---------|---------| +| ✅ alembic_version | Active | 1 | Migration tracking (001_initial_baseline) | +| ✅ users | Active | 0 | User accounts and profiles | +| ✅ events | Active | 0 | Event management | +| ✅ event_rsvps | Active | 0 | Event RSVPs and attendance | +| ✅ event_galleries | Active | 0 | Event photo galleries | +| ✅ roles | Active | 5 | RBAC role definitions | +| ✅ permissions | Active | 25 | RBAC permission definitions | +| ✅ role_permissions | Active | 49 | Role-permission mappings | +| ✅ user_invitations | Active | 0 | Admin invitation system | +| ✅ subscriptions | Active | 0 | User subscriptions | +| ✅ subscription_plans | Active | 3 | Available membership plans | +| ✅ donations | Active | 0 | Donation tracking | +| ✅ import_jobs | Active | 0 | CSV import tracking | +| ✅ import_rollback_audit | Active | 0 | Import rollback audit trail | +| ✅ newsletter_archives | Active | 0 | Newsletter document archive | +| ✅ financial_reports | Active | 0 | Financial document archive | +| ✅ bylaws_documents | Active | 0 | Bylaws document archive | +| ✅ storage_usage | Active | 1 | Storage quota tracking (100GB limit) | + +### ENUMs (8 total) + +| ENUM Name | Values | Used By | +|-----------|--------|---------| +| ✅ userstatus | pending_email, awaiting_event, pre_approved, payment_pending, active, inactive | users.status | +| ✅ userrole | guest, member, admin, finance, superadmin | users.role, user_invitations.role | +| ✅ rsvpstatus | 
yes, no, maybe | event_rsvps.rsvp_status | +| ✅ subscriptionstatus | active, past_due, canceled, incomplete, trialing | subscriptions.status | +| ✅ donationtype | one_time, recurring, pledge, in_kind, memorial | donations.donation_type | +| ✅ donationstatus | pending, completed, failed, refunded | donations.status | +| ✅ invitationstatus | pending, accepted, expired, revoked | user_invitations.status | +| ✅ importjobstatus | processing, completed, failed | import_jobs.status | + +--- + +## Seed Data Loaded + +### Roles (5) + +| Code | Name | System Role | Permissions | +|------|------|-------------|-------------| +| admin | Admin | Yes | 16 | +| finance | Finance | Yes | 7 | +| guest | Guest | Yes | 0 | +| member | Member | Yes | 1 | +| superadmin | Super Admin | Yes | 25 | + +### Permissions (25 across 5 modules) + +**Users Module (6 permissions):** +- users.view - View Users +- users.create - Create Users +- users.edit - Edit Users +- users.delete - Delete Users +- users.approve - Approve Users +- users.import - Import Users + +**Events Module (6 permissions):** +- events.view - View Events +- events.create - Create Events +- events.edit - Edit Events +- events.delete - Delete Events +- events.publish - Publish Events +- events.manage_attendance - Manage Attendance + +**Finance Module (5 permissions):** +- finance.view - View Financial Data +- finance.manage_plans - Manage Subscription Plans +- finance.manage_subscriptions - Manage Subscriptions +- finance.view_reports - View Financial Reports +- finance.export - Export Financial Data + +**Content Module (3 permissions):** +- content.newsletters - Manage Newsletters +- content.documents - Manage Documents +- content.gallery - Manage Gallery + +**System Module (5 permissions):** +- system.settings - System Settings +- system.roles - Manage Roles +- system.invitations - Manage Invitations +- system.storage - Manage Storage +- system.audit - View Audit Logs + +### Subscription Plans (3) + +| Plan Name | Price | 
Billing | Custom Pricing | Donation Support | +|-----------|-------|---------|----------------|------------------| +| Pay What You Want Membership | $30.00 (min) | Annual | ✅ Yes | ✅ Yes | +| Annual Individual Membership | $60.00 | Annual | ❌ No | ❌ No | +| Annual Group Membership | $100.00 | Annual | ❌ No | ❌ No | + +**Note:** Stripe price IDs need to be configured after Stripe setup. + +--- + +## Migration Status + +**Current Revision:** `001_initial_baseline (head)` +**Migration System:** Alembic 1.14.0 +**Schema Source:** `migrations/000_initial_schema.sql` +**Seed Source:** `migrations/seed_data.sql` + +**Migration History:** +- `001_initial_baseline` - Empty baseline marker (2026-01-02) + +**Future migrations** will be generated using: +```bash +alembic revision --autogenerate -m "description" +alembic upgrade head +``` + +--- + +## Next Steps + +### Immediate (Required) + +1. **Create Superadmin User** + ```bash + cd backend + python3 create_superadmin.py + ``` + +2. **Configure Stripe Price IDs** + ```sql + UPDATE subscription_plans + SET stripe_price_id = 'price_xxx' + WHERE name = 'Annual Individual Membership'; + + UPDATE subscription_plans + SET stripe_price_id = 'price_yyy' + WHERE name = 'Annual Group Membership'; + + UPDATE subscription_plans + SET stripe_price_id = 'price_zzz' + WHERE name = 'Pay What You Want Membership'; + ``` + +3. **Set Environment Variables** + - Copy `backend/.env.example` to `backend/.env` + - Fill in all required values (DATABASE_URL, JWT_SECRET, SMTP, Stripe, R2) + +4. **Test Application** + ```bash + # Backend + cd backend + uvicorn server:app --reload + + # Frontend (separate terminal) + cd frontend + yarn start + ``` + +### Optional (Recommended) + +1. **Add Sample Events** + - Login as superadmin + - Navigate to Admin → Events + - Create 2-3 sample events + +2. **Test Registration Flow** + - Register a test user + - Verify email verification works + - Test event RSVP + - Test admin approval flow + +3. 
**Configure Email Templates** + - Review templates in `backend/email_service.py` + - Customize colors, branding, copy + +4. **Set Up Monitoring** + - Configure error logging + - Set up uptime monitoring + - Configure backup schedule + +--- + +## Database Maintenance + +### Backup Command + +```bash +PGPASSWORD='your-password' pg_dump -h 10.9.23.11 -p 54321 -U postgres loaf_new > backup_$(date +%Y%m%d_%H%M%S).sql +``` + +### Restore Command + +```bash +PGPASSWORD='your-password' psql -h 10.9.23.11 -p 54321 -U postgres -d loaf_new < backup_file.sql +``` + +### Health Check Queries + +```sql +-- Check user count by status +SELECT status, COUNT(*) FROM users GROUP BY status; + +-- Check upcoming events +SELECT title, start_at FROM events WHERE start_at > NOW() ORDER BY start_at LIMIT 5; + +-- Check active subscriptions +SELECT COUNT(*) FROM subscriptions WHERE status = 'active'; + +-- Check storage usage +SELECT + total_bytes_used / 1024 / 1024 / 1024 as used_gb, + max_bytes_allowed / 1024 / 1024 / 1024 as max_gb, + ROUND((total_bytes_used::numeric / max_bytes_allowed * 100)::numeric, 2) as percent_used +FROM storage_usage; +``` + +--- + +## Support & Resources + +- **Deployment Guide:** See `DEPLOYMENT.md` for complete deployment instructions +- **API Documentation:** http://localhost:8000/docs (when backend running) +- **Alembic Guide:** See `backend/alembic/README.md` for migration documentation +- **Project Documentation:** See `CLAUDE.md` for codebase overview + +--- + +## Changelog + +**2026-01-03:** +- ✅ Created all 17 data tables +- ✅ Created all 8 ENUMs +- ✅ Loaded seed data (5 roles, 25 permissions, 3 subscription plans) +- ✅ Initialized Alembic tracking (001_initial_baseline) +- ✅ Created superadmin user helper script + +**Status:** Database is fully initialized and ready for use. Next step: Create superadmin user and start application. 
diff --git a/DEPLOYMENT.md b/DEPLOYMENT.md new file mode 100644 index 0000000..26ac16a --- /dev/null +++ b/DEPLOYMENT.md @@ -0,0 +1,379 @@ +# Deployment Guide - LOAF Membership Platform + +## Fresh Database Installation + +Follow these steps in order for a **brand new deployment**: + +### Step 1: Create PostgreSQL Database + +```bash +# Connect to PostgreSQL +psql -U postgres + +# Create database +CREATE DATABASE membership_db; + +# Create user (if needed) +CREATE USER loaf_admin WITH PASSWORD 'your-secure-password'; +GRANT ALL PRIVILEGES ON DATABASE membership_db TO loaf_admin; + +# Exit PostgreSQL +\q +``` + +### Step 2: Run Initial Schema + +```bash +cd backend + +# Apply the complete schema (creates all 17 tables, 8 enums, indexes) +psql -U loaf_admin -d membership_db -f migrations/000_initial_schema.sql +``` + +**What this creates:** +- ✅ 17 tables: users, events, subscriptions, roles, permissions, etc. +- ✅ 8 custom enums: UserStatus, UserRole, RSVPStatus, etc. +- ✅ All indexes and foreign keys +- ✅ All constraints and defaults + +### Step 3: Mark Database for Alembic Tracking + +```bash +# Mark the database as being at the baseline +alembic stamp head +``` + +### Step 4: Verify Setup + +```bash +# Check Alembic status +alembic current +# Expected output: 001_initial_baseline (head) + +# Check database tables +psql -U loaf_admin -d membership_db -c "\dt" +# Should show 17 tables +``` + +### Step 5: Set Environment Variables + +Create `backend/.env`: + +```env +# Database +DATABASE_URL=postgresql://loaf_admin:your-password@localhost:5432/membership_db + +# JWT +JWT_SECRET=your-secret-key-minimum-32-characters-long +JWT_ALGORITHM=HS256 +ACCESS_TOKEN_EXPIRE_MINUTES=30 + +# Email (SMTP) +SMTP_HOST=smtp.gmail.com +SMTP_PORT=587 +SMTP_USERNAME=your-email@gmail.com +SMTP_PASSWORD=your-app-password +SMTP_FROM_EMAIL=noreply@loafmembers.org +SMTP_FROM_NAME=LOAF Membership + +# Frontend URL +FRONTEND_URL=https://members.loafmembers.org + +# Cloudflare R2 
+R2_ENDPOINT_URL=https://your-account-id.r2.cloudflarestorage.com +R2_ACCESS_KEY_ID=your-r2-access-key +R2_SECRET_ACCESS_KEY=your-r2-secret-key +R2_BUCKET_NAME=loaf-membership +R2_PUBLIC_URL=https://cdn.loafmembers.org + +# Stripe +STRIPE_SECRET_KEY=sk_live_... +STRIPE_WEBHOOK_SECRET=whsec_... +STRIPE_PRICE_ID_ANNUAL=price_... +STRIPE_PRICE_ID_GROUP=price_... +``` + +### Step 6: Install Dependencies + +```bash +# Backend +cd backend +pip install -r requirements.txt + +# Frontend +cd ../frontend +yarn install +``` + +### Step 7: Start Services + +```bash +# Backend (in backend/) +uvicorn server:app --host 0.0.0.0 --port 8000 + +# Frontend (in frontend/) +yarn start +``` + +### Step 8: Create First Superadmin User + +```bash +# Connect to database +psql -U loaf_admin -d membership_db + +# Create superadmin user +INSERT INTO users ( + id, email, password_hash, first_name, last_name, + status, role, email_verified, created_at, updated_at +) VALUES ( + gen_random_uuid(), + 'admin@loafmembers.org', + '$2b$12$your-bcrypt-hashed-password-here', -- Use bcrypt to hash password + 'Admin', + 'User', + 'active', + 'superadmin', + true, + NOW(), + NOW() +); +``` + +**Generate password hash:** +```python +import bcrypt +password = b"your-secure-password" +hashed = bcrypt.hashpw(password, bcrypt.gensalt()) +print(hashed.decode()) +``` + +--- + +## Existing Database Update + +For **updating an existing deployment** with new code: + +### Step 1: Backup Database + +```bash +pg_dump -U loaf_admin membership_db > backup_$(date +%Y%m%d_%H%M%S).sql +``` + +### Step 2: Pull Latest Code + +```bash +git pull origin main +``` + +### Step 3: Install New Dependencies + +```bash +# Backend +cd backend +pip install -r requirements.txt + +# Frontend +cd ../frontend +yarn install +``` + +### Step 4: Apply Database Migrations + +```bash +cd backend + +# Check current migration status +alembic current + +# Apply pending migrations +alembic upgrade head + +# Verify +alembic current +``` + +### Step 
5: Restart Services + +```bash +# Restart backend +systemctl restart membership-backend + +# Rebuild and restart frontend +cd frontend +yarn build +systemctl restart membership-frontend +``` + +--- + +## First-Time Alembic Setup (Existing Database) + +If you have an **existing database** that was created with `000_initial_schema.sql` but hasn't been marked for Alembic tracking: + +```bash +cd backend + +# Mark database as being at the baseline (one-time only) +alembic stamp head + +# Verify +alembic current +# Expected output: 001_initial_baseline (head) +``` + +--- + +## Database Schema Verification + +**Check all tables exist:** +```bash +psql -U loaf_admin -d membership_db -c "\dt" +``` + +**Expected tables (17 total):** +- users +- events +- event_rsvps +- subscriptions +- subscription_plans +- permissions +- roles +- role_permissions +- user_invitations +- import_jobs +- import_rollback_audit +- event_galleries +- newsletter_archives +- financial_reports +- bylaws_documents +- donations +- storage_usage + +**Check enums:** +```bash +psql -U loaf_admin -d membership_db -c "\dT" +``` + +**Expected enums (8 total):** +- userstatus +- userrole +- rsvpstatus +- subscriptionstatus +- donationtype +- donationstatus +- invitationstatus +- importjobstatus + +--- + +## Rollback Procedures + +### Rollback Last Migration + +```bash +cd backend +alembic downgrade -1 +``` + +### Rollback to Specific Revision + +```bash +alembic downgrade +``` + +### Complete Database Reset + +```bash +# WARNING: This deletes ALL data! + +# 1. Backup first +pg_dump -U loaf_admin membership_db > emergency_backup.sql + +# 2. Drop database +dropdb membership_db + +# 3. Recreate database +createdb membership_db + +# 4. Run initial schema +psql -U loaf_admin -d membership_db -f backend/migrations/000_initial_schema.sql + +# 5. Mark for Alembic +cd backend +alembic stamp head +``` + +--- + +## Troubleshooting + +### "relation does not exist" error + +The database wasn't initialized properly. 
+ +**Solution:** +```bash +psql -U loaf_admin -d membership_db -f backend/migrations/000_initial_schema.sql +alembic stamp head +``` + +### "Target database is not up to date" + +Migrations haven't been applied. + +**Solution:** +```bash +cd backend +alembic upgrade head +``` + +### "Can't locate revision" + +Alembic tracking is out of sync. + +**Solution:** +```bash +# Check what revision the database thinks it's at +alembic current + +# If empty or wrong, manually set it +alembic stamp head +``` + +### Database connection errors + +Check `.env` file has correct `DATABASE_URL`. + +**Format:** +``` +DATABASE_URL=postgresql://username:password@host:port/database +``` + +--- + +## Production Checklist + +Before going live: + +- [ ] Database created and schema applied +- [ ] Alembic marked as up-to-date (`alembic current` shows baseline) +- [ ] All environment variables set in `.env` +- [ ] Dependencies installed (Python + Node) +- [ ] Superadmin user created +- [ ] SSL certificates configured +- [ ] Backup system in place +- [ ] Monitoring configured +- [ ] Domain DNS pointing to server +- [ ] Email sending verified (SMTP working) +- [ ] Stripe webhook endpoint configured +- [ ] R2 bucket accessible and CORS configured + +--- + +## Support + +For issues: +1. Check logs: `tail -f backend/logs/app.log` +2. Check Alembic status: `alembic current` +3. Verify environment variables: `cat backend/.env` +4. Test database connection: `psql -U loaf_admin -d membership_db` diff --git a/__pycache__/models.cpython-312.pyc b/__pycache__/models.cpython-312.pyc index aa1ad19..173f7c1 100644 Binary files a/__pycache__/models.cpython-312.pyc and b/__pycache__/models.cpython-312.pyc differ diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..f6e2e8d --- /dev/null +++ b/alembic.ini @@ -0,0 +1,118 @@ +# A generic, single database configuration. 
+ +[alembic] +# path to migration scripts +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +# version_path_separator = newline +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# sqlalchemy.url = driver://user:pass@localhost/dbname +# Database URL is configured in alembic/env.py from .env file for security + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README 
b/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/alembic/README.md b/alembic/README.md new file mode 100644 index 0000000..6056215 --- /dev/null +++ b/alembic/README.md @@ -0,0 +1,259 @@ +# Alembic Database Migrations + +This directory contains **Alembic** database migrations for the LOAF membership platform. + +## What is Alembic? + +Alembic is a lightweight database migration tool for SQLAlchemy. It allows you to: +- Track database schema changes over time +- Apply migrations incrementally +- Roll back changes if needed +- Auto-generate migration scripts from model changes + +## Directory Structure + +``` +alembic/ +├── versions/ # Migration scripts (KEEP IN VERSION CONTROL) +│ └── *.py # Individual migration files +├── env.py # Alembic environment configuration +├── script.py.mako # Template for new migration files +└── README.md # This file +``` + +## Quick Start + +### 1. Create a New Migration + +After making changes to `models.py`, generate a migration: + +```bash +cd backend +alembic revision --autogenerate -m "add_user_bio_field" +``` + +This will create a new file in `alembic/versions/` like: +``` +3e02c74581c9_add_user_bio_field.py +``` + +### 2. Review the Generated Migration + +**IMPORTANT:** Always review auto-generated migrations before applying them! + +```bash +# Open the latest migration file +cat alembic/versions/3e02c74581c9_add_user_bio_field.py +``` + +Check: +- ✅ The `upgrade()` function contains the correct changes +- ✅ The `downgrade()` function properly reverses those changes +- ✅ No unintended table drops or data loss + +### 3. Apply the Migration + +```bash +# Apply all pending migrations +alembic upgrade head + +# Or apply migrations one at a time +alembic upgrade +1 +``` + +### 4. 
Rollback a Migration + +```bash +# Rollback the last migration +alembic downgrade -1 + +# Rollback to a specific revision +alembic downgrade 3e02c74581c9 +``` + +## Common Commands + +| Command | Description | +|---------|-------------| +| `alembic current` | Show current migration revision | +| `alembic history` | Show migration history | +| `alembic heads` | Show head revisions | +| `alembic upgrade head` | Apply all pending migrations | +| `alembic downgrade -1` | Rollback last migration | +| `alembic revision --autogenerate -m "message"` | Create new migration | +| `alembic stamp head` | Mark database as up-to-date without running migrations | + +## Migration Workflow + +### For Development + +1. **Make changes to `models.py`** + ```python + # In models.py + class User(Base): + # ...existing fields... + bio = Column(Text, nullable=True) # New field + ``` + +2. **Generate migration** + ```bash + alembic revision --autogenerate -m "add_user_bio_field" + ``` + +3. **Review the generated file** + ```python + # In alembic/versions/xxxxx_add_user_bio_field.py + def upgrade(): + op.add_column('users', sa.Column('bio', sa.Text(), nullable=True)) + + def downgrade(): + op.drop_column('users', 'bio') + ``` + +4. **Apply migration** + ```bash + alembic upgrade head + ``` + +5. **Commit migration file to Git** + ```bash + git add alembic/versions/xxxxx_add_user_bio_field.py + git commit -m "Add user bio field" + ``` + +### For Production Deployment + +**Fresh Database (New Installation):** +```bash +# 1. Create database +createdb membership_db + +# 2. Run initial schema SQL (creates all 17 tables) +psql -U username -d membership_db -f ../migrations/000_initial_schema.sql + +# 3. Mark database as up-to-date with Alembic +alembic stamp head + +# 4. Verify +alembic current # Should show: 001_initial_baseline (head) +``` + +**Existing Database (Apply New Migrations):** +```bash +# 1. Pull latest code +git pull origin main + +# 2. Apply migrations +alembic upgrade head + +# 3. 
Verify +alembic current + +# 4. Restart application +systemctl restart membership-backend +``` + +## Configuration + +### Database Connection + +Alembic reads the `DATABASE_URL` from your `.env` file: + +```env +DATABASE_URL=postgresql://user:password@localhost:5432/membership_db +``` + +The connection is configured in `alembic/env.py` (lines 29-36). + +### Target Metadata + +Alembic uses `Base.metadata` from `models.py` to detect changes: + +```python +# In alembic/env.py +from models import Base +target_metadata = Base.metadata +``` + +## Important Notes + +### ✅ DO: +- Always review auto-generated migrations before applying +- Test migrations in development before production +- Commit migration files to version control +- Write descriptive migration messages +- Include both `upgrade()` and `downgrade()` functions + +### ❌ DON'T: +- Don't edit migration files after they've been applied in production +- Don't delete migration files from `alembic/versions/` +- Don't modify the `revision` or `down_revision` values +- Don't commit `.pyc` files (already in .gitignore) + +## Migration History + +| Revision | Description | Date | Type | +|----------|-------------|------|------| +| `001_initial_baseline` | Baseline marker (empty migration) | 2026-01-02 | Baseline | + +**Note:** The actual initial schema is created by running `backend/migrations/000_initial_schema.sql`. The baseline migration is an empty marker that indicates the starting point for Alembic tracking. + +## Troubleshooting + +### "Target database is not up to date" + +```bash +# Check current revision +alembic current + +# Check pending migrations +alembic history + +# Apply missing migrations +alembic upgrade head +``` + +### "FAILED: Can't locate revision identified by 'xxxxx'" + +The database thinks it's at a revision that doesn't exist in your `alembic/versions/`. 
+ +**Solution:** +```bash +# Mark database at a known good revision +alembic stamp head +``` + +### Migration conflicts + +If you get merge conflicts in migration files: + +1. Resolve conflicts in the migration file +2. Ensure `revision` and `down_revision` chain is correct +3. Test the migration locally + +### Fresh database setup + +For a completely new database: + +```bash +# Step 1: Run initial schema SQL +psql -U username -d membership_db -f ../migrations/000_initial_schema.sql + +# Step 2: Mark as up-to-date +alembic stamp head + +# Step 3: Verify +alembic current # Should show: 001_initial_baseline (head) +``` + +## Legacy Migrations + +Old numbered SQL migrations (`000_initial_schema.sql` through `011_wordpress_import_enhancements.sql`) are preserved in `backend/migrations/` for reference. These have been consolidated into the initial Alembic migration. + +**Going forward, all new migrations must use Alembic.** + +## Additional Resources + +- [Alembic Documentation](https://alembic.sqlalchemy.org/) +- [SQLAlchemy Documentation](https://docs.sqlalchemy.org/) +- [PostgreSQL Documentation](https://www.postgresql.org/docs/) diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..e74db09 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,96 @@ +import os +import sys +from logging.config import fileConfig + +from sqlalchemy import engine_from_config, pool +from alembic import context +from dotenv import load_dotenv + +# Add the parent directory to the path so we can import our models +sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) + +# Load environment variables from .env file +load_dotenv() + +# Import all models so Alembic can detect them +from models import Base +import models # This ensures all models are imported + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. 
+# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# Set the SQLAlchemy URL from environment variable +database_url = os.getenv("DATABASE_URL") +if database_url: + config.set_main_option("sqlalchemy.url", database_url) +else: + raise ValueError( + "DATABASE_URL environment variable not set. " + "Please create a .env file with DATABASE_URL=postgresql://user:password@host:port/dbname" + ) + +# Add your model's MetaData object here for 'autogenerate' support +target_metadata = Base.metadata + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + compare_type=True, # Detect type changes + compare_server_default=True, # Detect default value changes + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + compare_type=True, # Detect type changes + compare_server_default=True, # Detect default value changes + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/001_initial_baseline.py b/alembic/versions/001_initial_baseline.py new file mode 100644 index 0000000..6a2dd4a --- /dev/null +++ b/alembic/versions/001_initial_baseline.py @@ -0,0 +1,59 @@ +"""initial_baseline - Use 000_initial_schema.sql for fresh deployments + +Revision ID: 001_initial_baseline +Revises: +Create Date: 2026-01-02 16:45:00.000000 + +IMPORTANT: This is a baseline migration for existing databases. + +For FRESH deployments: +1. Run: psql -U user -d dbname -f backend/migrations/000_initial_schema.sql +2. 
Run: alembic stamp head + +For EXISTING deployments (already have database): +1. Run: alembic stamp head (marks database as up-to-date) + +This migration intentionally does NOTHING because: +- Fresh deployments use 000_initial_schema.sql to create all tables +- Existing deployments already have all tables from 000_initial_schema.sql +- Future migrations will be incremental changes from this baseline +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '001_initial_baseline' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """ + This migration does nothing. + + It serves as a baseline marker that indicates: + - All 17 tables exist (users, events, subscriptions, etc.) + - All 8 enums are defined (UserStatus, UserRole, etc.) + - All indexes and constraints are in place + + The actual schema is created by running: + backend/migrations/000_initial_schema.sql + """ + pass + + +def downgrade() -> None: + """ + Cannot downgrade below baseline. + + If you need to completely reset the database: + 1. dropdb dbname + 2. createdb dbname + 3. psql -U user -d dbname -f backend/migrations/000_initial_schema.sql + 4. 
alembic stamp head + """ + pass diff --git a/alembic/versions/002_add_missing_user_fields.py b/alembic/versions/002_add_missing_user_fields.py new file mode 100644 index 0000000..f04aeef --- /dev/null +++ b/alembic/versions/002_add_missing_user_fields.py @@ -0,0 +1,92 @@ +"""add_missing_user_fields + +Revision ID: 002_add_missing_user_fields +Revises: 001_initial_baseline +Create Date: 2026-01-04 + +Adds missing user fields to sync models.py with database: +- scholarship_reason +- directory_* fields (email, bio, address, phone, dob, partner_name) +- profile_photo_url (rename from profile_image_url) +- social_media_* fields (facebook, instagram, twitter, linkedin) +- email_verification_expires +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '002_add_missing_user_fields' +down_revision: Union[str, None] = '001_initial_baseline' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Add missing user fields (skip if already exists)""" + from sqlalchemy import inspect + + conn = op.get_bind() + inspector = inspect(conn) + existing_columns = {col['name'] for col in inspector.get_columns('users')} + + # Add scholarship_reason + if 'scholarship_reason' not in existing_columns: + op.add_column('users', sa.Column('scholarship_reason', sa.Text(), nullable=True)) + + # Add directory fields + if 'directory_email' not in existing_columns: + op.add_column('users', sa.Column('directory_email', sa.String(), nullable=True)) + if 'directory_bio' not in existing_columns: + op.add_column('users', sa.Column('directory_bio', sa.Text(), nullable=True)) + if 'directory_address' not in existing_columns: + op.add_column('users', sa.Column('directory_address', sa.String(), nullable=True)) + if 'directory_phone' not in existing_columns: + op.add_column('users', sa.Column('directory_phone', sa.String(), 
nullable=True)) + if 'directory_dob' not in existing_columns: + op.add_column('users', sa.Column('directory_dob', sa.DateTime(), nullable=True)) + if 'directory_partner_name' not in existing_columns: + op.add_column('users', sa.Column('directory_partner_name', sa.String(), nullable=True)) + + # Rename profile_image_url to profile_photo_url (skip if already renamed) + if 'profile_image_url' in existing_columns and 'profile_photo_url' not in existing_columns: + op.alter_column('users', 'profile_image_url', new_column_name='profile_photo_url') + + # Add social media fields + if 'social_media_facebook' not in existing_columns: + op.add_column('users', sa.Column('social_media_facebook', sa.String(), nullable=True)) + if 'social_media_instagram' not in existing_columns: + op.add_column('users', sa.Column('social_media_instagram', sa.String(), nullable=True)) + if 'social_media_twitter' not in existing_columns: + op.add_column('users', sa.Column('social_media_twitter', sa.String(), nullable=True)) + if 'social_media_linkedin' not in existing_columns: + op.add_column('users', sa.Column('social_media_linkedin', sa.String(), nullable=True)) + + # Add email_verification_expires if missing + if 'email_verification_expires' not in existing_columns: + op.add_column('users', sa.Column('email_verification_expires', sa.DateTime(), nullable=True)) + + +def downgrade() -> None: + """Remove added fields (rollback)""" + + # Remove social media fields + op.drop_column('users', 'social_media_linkedin') + op.drop_column('users', 'social_media_twitter') + op.drop_column('users', 'social_media_instagram') + op.drop_column('users', 'social_media_facebook') + + # Rename profile_photo_url back to profile_image_url + op.alter_column('users', 'profile_photo_url', new_column_name='profile_image_url') + + # Remove directory fields + op.drop_column('users', 'directory_partner_name') + op.drop_column('users', 'directory_dob') + op.drop_column('users', 'directory_phone') + op.drop_column('users', 
'directory_address') + op.drop_column('users', 'directory_bio') + op.drop_column('users', 'directory_email') + + # Remove scholarship_reason + op.drop_column('users', 'scholarship_reason') diff --git a/alembic/versions/003_add_user_invitation_fields.py b/alembic/versions/003_add_user_invitation_fields.py new file mode 100644 index 0000000..4c91a11 --- /dev/null +++ b/alembic/versions/003_add_user_invitation_fields.py @@ -0,0 +1,50 @@ +"""add_user_invitation_fields + +Revision ID: 003_add_user_invitation_fields +Revises: 002_add_missing_user_fields +Create Date: 2026-01-04 + +Adds optional pre-filled fields to user_invitations table: +- first_name +- last_name +- phone +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '003_add_user_invitation_fields' +down_revision: Union[str, None] = '002_add_missing_user_fields' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Add optional pre-filled information fields to user_invitations (skip if already exists)""" + from sqlalchemy import inspect + + conn = op.get_bind() + inspector = inspect(conn) + existing_columns = {col['name'] for col in inspector.get_columns('user_invitations')} + + # Add first_name if missing + if 'first_name' not in existing_columns: + op.add_column('user_invitations', sa.Column('first_name', sa.String(), nullable=True)) + + # Add last_name if missing + if 'last_name' not in existing_columns: + op.add_column('user_invitations', sa.Column('last_name', sa.String(), nullable=True)) + + # Add phone if missing + if 'phone' not in existing_columns: + op.add_column('user_invitations', sa.Column('phone', sa.String(), nullable=True)) + + +def downgrade() -> None: + """Remove added fields (rollback)""" + + op.drop_column('user_invitations', 'phone') + op.drop_column('user_invitations', 'last_name') + 
op.drop_column('user_invitations', 'first_name') diff --git a/alembic/versions/004_add_document_file_sizes.py b/alembic/versions/004_add_document_file_sizes.py new file mode 100644 index 0000000..dbebb09 --- /dev/null +++ b/alembic/versions/004_add_document_file_sizes.py @@ -0,0 +1,52 @@ +"""add_document_file_sizes + +Revision ID: 004_add_document_file_sizes +Revises: 003_add_user_invitation_fields +Create Date: 2026-01-04 + +Adds file_size_bytes to all document tables: +- newsletter_archives +- financial_reports +- bylaws_documents +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '004_add_document_file_sizes' +down_revision: Union[str, None] = '003_add_user_invitation_fields' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Add file_size_bytes column to document tables (skip if already exists)""" + from sqlalchemy import inspect + + conn = op.get_bind() + inspector = inspect(conn) + + # Add to newsletter_archives if missing + existing_columns = {col['name'] for col in inspector.get_columns('newsletter_archives')} + if 'file_size_bytes' not in existing_columns: + op.add_column('newsletter_archives', sa.Column('file_size_bytes', sa.Integer(), nullable=True)) + + # Add to financial_reports if missing + existing_columns = {col['name'] for col in inspector.get_columns('financial_reports')} + if 'file_size_bytes' not in existing_columns: + op.add_column('financial_reports', sa.Column('file_size_bytes', sa.Integer(), nullable=True)) + + # Add to bylaws_documents if missing + existing_columns = {col['name'] for col in inspector.get_columns('bylaws_documents')} + if 'file_size_bytes' not in existing_columns: + op.add_column('bylaws_documents', sa.Column('file_size_bytes', sa.Integer(), nullable=True)) + + +def downgrade() -> None: + """Remove file_size_bytes columns (rollback)""" + 
+ op.drop_column('bylaws_documents', 'file_size_bytes') + op.drop_column('financial_reports', 'file_size_bytes') + op.drop_column('newsletter_archives', 'file_size_bytes') diff --git a/alembic/versions/005_fix_subscriptions_and_storage.py b/alembic/versions/005_fix_subscriptions_and_storage.py new file mode 100644 index 0000000..19cef7b --- /dev/null +++ b/alembic/versions/005_fix_subscriptions_and_storage.py @@ -0,0 +1,81 @@ +"""fix_subscriptions_and_storage + +Revision ID: 005_fix_subs_storage +Revises: 004_add_document_file_sizes +Create Date: 2026-01-04 + +Fixes: +- Add missing columns to subscriptions table +- Rename storage_usage.last_calculated_at to last_updated +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import UUID + + +# revision identifiers, used by Alembic. +revision: str = '005_fix_subs_storage' +down_revision: Union[str, None] = '004_add_document_file_sizes' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Add missing columns and fix naming (skip if already exists)""" + from sqlalchemy import inspect + + conn = op.get_bind() + inspector = inspect(conn) + + # Check existing columns in subscriptions table + existing_columns = {col['name'] for col in inspector.get_columns('subscriptions')} + + # Add missing columns to subscriptions table only if they don't exist + if 'start_date' not in existing_columns: + op.add_column('subscriptions', sa.Column('start_date', sa.DateTime(timezone=True), nullable=True)) + if 'end_date' not in existing_columns: + op.add_column('subscriptions', sa.Column('end_date', sa.DateTime(timezone=True), nullable=True)) + if 'amount_paid_cents' not in existing_columns: + op.add_column('subscriptions', sa.Column('amount_paid_cents', sa.Integer(), nullable=True)) + if 'manual_payment_notes' not in existing_columns: + op.add_column('subscriptions', 
sa.Column('manual_payment_notes', sa.Text(), nullable=True)) + if 'manual_payment_admin_id' not in existing_columns: + op.add_column('subscriptions', sa.Column('manual_payment_admin_id', UUID(as_uuid=True), nullable=True)) + if 'manual_payment_date' not in existing_columns: + op.add_column('subscriptions', sa.Column('manual_payment_date', sa.DateTime(timezone=True), nullable=True)) + if 'payment_method' not in existing_columns: + op.add_column('subscriptions', sa.Column('payment_method', sa.String(50), nullable=True)) + + # Add foreign key for manual_payment_admin_id if it doesn't exist + existing_fks = [fk['name'] for fk in inspector.get_foreign_keys('subscriptions')] + if 'subscriptions_manual_payment_admin_id_fkey' not in existing_fks: + op.create_foreign_key( + 'subscriptions_manual_payment_admin_id_fkey', + 'subscriptions', 'users', + ['manual_payment_admin_id'], ['id'] + ) + + # Rename storage_usage.last_calculated_at to last_updated (only if needed) + storage_columns = {col['name'] for col in inspector.get_columns('storage_usage')} + if 'last_calculated_at' in storage_columns and 'last_updated' not in storage_columns: + op.alter_column('storage_usage', 'last_calculated_at', new_column_name='last_updated') + + +def downgrade() -> None: + """Remove added columns (rollback)""" + + # Rename back + op.alter_column('storage_usage', 'last_updated', new_column_name='last_calculated_at') + + # Drop foreign key first + op.drop_constraint('subscriptions_manual_payment_admin_id_fkey', 'subscriptions', type_='foreignkey') + + # Drop columns from subscriptions + op.drop_column('subscriptions', 'payment_method') + op.drop_column('subscriptions', 'manual_payment_date') + op.drop_column('subscriptions', 'manual_payment_admin_id') + op.drop_column('subscriptions', 'manual_payment_notes') + op.drop_column('subscriptions', 'amount_paid_cents') + op.drop_column('subscriptions', 'end_date') + op.drop_column('subscriptions', 'start_date') diff --git 
a/alembic/versions/006_rename_is_active.py b/alembic/versions/006_rename_is_active.py new file mode 100644 index 0000000..77e669f --- /dev/null +++ b/alembic/versions/006_rename_is_active.py @@ -0,0 +1,37 @@ +"""rename_is_active + +Revision ID: 006_rename_active +Revises: 005_fix_subs_storage +Create Date: 2026-01-04 + +Fixes: +- Rename subscription_plans.is_active to active (match models.py) +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '006_rename_active' +down_revision: Union[str, None] = '005_fix_subs_storage' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Rename is_active to active (skip if already renamed)""" + from sqlalchemy import inspect + + conn = op.get_bind() + inspector = inspect(conn) + + # Check if rename is needed + existing_columns = {col['name'] for col in inspector.get_columns('subscription_plans')} + if 'is_active' in existing_columns and 'active' not in existing_columns: + op.alter_column('subscription_plans', 'is_active', new_column_name='active') + + +def downgrade() -> None: + """Rename back to is_active""" + op.alter_column('subscription_plans', 'active', new_column_name='is_active') diff --git a/alembic/versions/007_add_subscription_plan_fields.py b/alembic/versions/007_add_subscription_plan_fields.py new file mode 100644 index 0000000..b21b03b --- /dev/null +++ b/alembic/versions/007_add_subscription_plan_fields.py @@ -0,0 +1,65 @@ +"""add_subscription_plan_fields + +Revision ID: 007_add_sub_fields +Revises: 006_rename_active +Create Date: 2026-01-04 + +Fixes: +- Add missing columns to subscription_plans table + (custom cycle fields, dynamic pricing fields) +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '007_add_sub_fields' +down_revision: Union[str, None] = '006_rename_active' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Add missing columns to subscription_plans (skip if already exists)""" + from alembic import op + import sqlalchemy as sa + from sqlalchemy import inspect + + # Get database connection + conn = op.get_bind() + inspector = inspect(conn) + existing_columns = {col['name'] for col in inspector.get_columns('subscription_plans')} + + # Custom billing cycle fields + if 'custom_cycle_enabled' not in existing_columns: + op.add_column('subscription_plans', sa.Column('custom_cycle_enabled', sa.Boolean(), nullable=False, server_default='false')) + if 'custom_cycle_start_month' not in existing_columns: + op.add_column('subscription_plans', sa.Column('custom_cycle_start_month', sa.Integer(), nullable=True)) + if 'custom_cycle_start_day' not in existing_columns: + op.add_column('subscription_plans', sa.Column('custom_cycle_start_day', sa.Integer(), nullable=True)) + if 'custom_cycle_end_month' not in existing_columns: + op.add_column('subscription_plans', sa.Column('custom_cycle_end_month', sa.Integer(), nullable=True)) + if 'custom_cycle_end_day' not in existing_columns: + op.add_column('subscription_plans', sa.Column('custom_cycle_end_day', sa.Integer(), nullable=True)) + + # Dynamic pricing fields + if 'minimum_price_cents' not in existing_columns: + op.add_column('subscription_plans', sa.Column('minimum_price_cents', sa.Integer(), nullable=False, server_default='3000')) + if 'suggested_price_cents' not in existing_columns: + op.add_column('subscription_plans', sa.Column('suggested_price_cents', sa.Integer(), nullable=True)) + if 'allow_donation' not in existing_columns: + op.add_column('subscription_plans', sa.Column('allow_donation', sa.Boolean(), nullable=False, server_default='true')) + + +def downgrade() -> None: + """Remove added columns 
(rollback)""" + + op.drop_column('subscription_plans', 'allow_donation') + op.drop_column('subscription_plans', 'suggested_price_cents') + op.drop_column('subscription_plans', 'minimum_price_cents') + op.drop_column('subscription_plans', 'custom_cycle_end_day') + op.drop_column('subscription_plans', 'custom_cycle_end_month') + op.drop_column('subscription_plans', 'custom_cycle_start_day') + op.drop_column('subscription_plans', 'custom_cycle_start_month') + op.drop_column('subscription_plans', 'custom_cycle_enabled') diff --git a/alembic/versions/008_add_donation_columns.py b/alembic/versions/008_add_donation_columns.py new file mode 100644 index 0000000..12ed604 --- /dev/null +++ b/alembic/versions/008_add_donation_columns.py @@ -0,0 +1,55 @@ +"""add_donation_columns + +Revision ID: 008_add_donations +Revises: 007_add_sub_fields +Create Date: 2026-01-04 + +Fixes: +- Add missing Stripe payment columns to donations table +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa +from sqlalchemy import inspect + + +# revision identifiers, used by Alembic. 
+revision: str = '008_add_donations' +down_revision: Union[str, None] = '007_add_sub_fields' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Add missing columns to donations table (skip if already exists)""" + + # Get database connection + conn = op.get_bind() + inspector = inspect(conn) + existing_columns = {col['name'] for col in inspector.get_columns('donations')} + + # Stripe payment columns + if 'stripe_checkout_session_id' not in existing_columns: + op.add_column('donations', sa.Column('stripe_checkout_session_id', sa.String(), nullable=True)) + + if 'stripe_payment_intent_id' not in existing_columns: + op.add_column('donations', sa.Column('stripe_payment_intent_id', sa.String(), nullable=True)) + + if 'payment_method' not in existing_columns: + op.add_column('donations', sa.Column('payment_method', sa.String(), nullable=True)) + + if 'notes' not in existing_columns: + op.add_column('donations', sa.Column('notes', sa.Text(), nullable=True)) + + if 'updated_at' not in existing_columns: + op.add_column('donations', sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True)) + + +def downgrade() -> None: + """Remove added columns (rollback)""" + + op.drop_column('donations', 'updated_at') + op.drop_column('donations', 'notes') + op.drop_column('donations', 'payment_method') + op.drop_column('donations', 'stripe_payment_intent_id') + op.drop_column('donations', 'stripe_checkout_session_id') diff --git a/alembic/versions/009_add_all_missing_columns.py b/alembic/versions/009_add_all_missing_columns.py new file mode 100644 index 0000000..a667f26 --- /dev/null +++ b/alembic/versions/009_add_all_missing_columns.py @@ -0,0 +1,237 @@ +"""add_all_missing_columns + +Revision ID: 009_add_all_missing +Revises: 008_add_donations +Create Date: 2026-01-04 + +Fixes: +- Add ALL remaining missing columns across all tables +- Users: newsletter preferences, volunteer, scholarship, 
directory, password reset, ToS, member_since, reminders, rejection, import tracking +- Events: calendar_uid +- Subscriptions: base_subscription_cents, donation_cents, manual_payment +- ImportJobs: WordPress import fields +- Create ImportRollbackAudit table if not exists +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy import inspect + + +# revision identifiers, used by Alembic. +revision: str = '009_add_all_missing' +down_revision: Union[str, None] = '008_add_donations' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Add all missing columns across all tables""" + + conn = op.get_bind() + inspector = inspect(conn) + + # ============================================================ + # 1. USERS TABLE - Add ~28 missing columns + # ============================================================ + users_columns = {col['name'] for col in inspector.get_columns('users')} + + # Newsletter publication preferences + if 'newsletter_publish_name' not in users_columns: + op.add_column('users', sa.Column('newsletter_publish_name', sa.Boolean(), nullable=False, server_default='false')) + if 'newsletter_publish_photo' not in users_columns: + op.add_column('users', sa.Column('newsletter_publish_photo', sa.Boolean(), nullable=False, server_default='false')) + if 'newsletter_publish_birthday' not in users_columns: + op.add_column('users', sa.Column('newsletter_publish_birthday', sa.Boolean(), nullable=False, server_default='false')) + if 'newsletter_publish_none' not in users_columns: + op.add_column('users', sa.Column('newsletter_publish_none', sa.Boolean(), nullable=False, server_default='false')) + + # Volunteer interests + if 'volunteer_interests' not in users_columns: + op.add_column('users', sa.Column('volunteer_interests', sa.JSON(), nullable=True, server_default='[]')) + + # 
Scholarship + if 'scholarship_requested' not in users_columns: + op.add_column('users', sa.Column('scholarship_requested', sa.Boolean(), nullable=False, server_default='false')) + + # Directory + if 'show_in_directory' not in users_columns: + op.add_column('users', sa.Column('show_in_directory', sa.Boolean(), nullable=False, server_default='false')) + + # Password reset + if 'password_reset_token' not in users_columns: + op.add_column('users', sa.Column('password_reset_token', sa.String(), nullable=True)) + if 'password_reset_expires' not in users_columns: + op.add_column('users', sa.Column('password_reset_expires', sa.DateTime(), nullable=True)) + if 'force_password_change' not in users_columns: + op.add_column('users', sa.Column('force_password_change', sa.Boolean(), nullable=False, server_default='false')) + + # Terms of Service + if 'accepts_tos' not in users_columns: + op.add_column('users', sa.Column('accepts_tos', sa.Boolean(), nullable=False, server_default='false')) + if 'tos_accepted_at' not in users_columns: + op.add_column('users', sa.Column('tos_accepted_at', sa.DateTime(), nullable=True)) + + # Member since + if 'member_since' not in users_columns: + op.add_column('users', sa.Column('member_since', sa.DateTime(), nullable=True)) + + # Email verification reminders + if 'email_verification_reminders_sent' not in users_columns: + op.add_column('users', sa.Column('email_verification_reminders_sent', sa.Integer(), nullable=False, server_default='0')) + if 'last_email_verification_reminder_at' not in users_columns: + op.add_column('users', sa.Column('last_email_verification_reminder_at', sa.DateTime(), nullable=True)) + + # Event attendance reminders + if 'event_attendance_reminders_sent' not in users_columns: + op.add_column('users', sa.Column('event_attendance_reminders_sent', sa.Integer(), nullable=False, server_default='0')) + if 'last_event_attendance_reminder_at' not in users_columns: + op.add_column('users', 
sa.Column('last_event_attendance_reminder_at', sa.DateTime(), nullable=True)) + + # Payment reminders + if 'payment_reminders_sent' not in users_columns: + op.add_column('users', sa.Column('payment_reminders_sent', sa.Integer(), nullable=False, server_default='0')) + if 'last_payment_reminder_at' not in users_columns: + op.add_column('users', sa.Column('last_payment_reminder_at', sa.DateTime(), nullable=True)) + + # Renewal reminders + if 'renewal_reminders_sent' not in users_columns: + op.add_column('users', sa.Column('renewal_reminders_sent', sa.Integer(), nullable=False, server_default='0')) + if 'last_renewal_reminder_at' not in users_columns: + op.add_column('users', sa.Column('last_renewal_reminder_at', sa.DateTime(), nullable=True)) + + # Rejection tracking + if 'rejection_reason' not in users_columns: + op.add_column('users', sa.Column('rejection_reason', sa.Text(), nullable=True)) + if 'rejected_at' not in users_columns: + op.add_column('users', sa.Column('rejected_at', sa.DateTime(timezone=True), nullable=True)) + if 'rejected_by' not in users_columns: + op.add_column('users', sa.Column('rejected_by', UUID(as_uuid=True), nullable=True)) + # Note: Foreign key constraint skipped to avoid circular dependency issues + + # WordPress import tracking + if 'import_source' not in users_columns: + op.add_column('users', sa.Column('import_source', sa.String(50), nullable=True)) + if 'import_job_id' not in users_columns: + op.add_column('users', sa.Column('import_job_id', UUID(as_uuid=True), nullable=True)) + # Note: Foreign key will be added after import_jobs table is updated + if 'wordpress_user_id' not in users_columns: + op.add_column('users', sa.Column('wordpress_user_id', sa.BigInteger(), nullable=True)) + if 'wordpress_registered_date' not in users_columns: + op.add_column('users', sa.Column('wordpress_registered_date', sa.DateTime(timezone=True), nullable=True)) + + # ============================================================ + # 2. 
EVENTS TABLE - Add calendar_uid + # ============================================================ + events_columns = {col['name'] for col in inspector.get_columns('events')} + + if 'calendar_uid' not in events_columns: + op.add_column('events', sa.Column('calendar_uid', sa.String(), nullable=True)) + + # ============================================================ + # 3. SUBSCRIPTIONS TABLE - Add donation tracking + # ============================================================ + subscriptions_columns = {col['name'] for col in inspector.get_columns('subscriptions')} + + if 'base_subscription_cents' not in subscriptions_columns: + op.add_column('subscriptions', sa.Column('base_subscription_cents', sa.Integer(), nullable=True)) + # Update existing rows: base_subscription_cents = amount_paid_cents - donation_cents (default 0) + op.execute("UPDATE subscriptions SET base_subscription_cents = COALESCE(amount_paid_cents, 0) WHERE base_subscription_cents IS NULL") + # Make it non-nullable after populating + op.alter_column('subscriptions', 'base_subscription_cents', nullable=False) + + if 'donation_cents' not in subscriptions_columns: + op.add_column('subscriptions', sa.Column('donation_cents', sa.Integer(), nullable=False, server_default='0')) + + if 'manual_payment' not in subscriptions_columns: + op.add_column('subscriptions', sa.Column('manual_payment', sa.Boolean(), nullable=False, server_default='false')) + + # ============================================================ + # 4. 
IMPORT_JOBS TABLE - Add WordPress import fields + # ============================================================ + import_jobs_columns = {col['name'] for col in inspector.get_columns('import_jobs')} + + if 'field_mapping' not in import_jobs_columns: + op.add_column('import_jobs', sa.Column('field_mapping', sa.JSON(), nullable=False, server_default='{}')) + + if 'wordpress_metadata' not in import_jobs_columns: + op.add_column('import_jobs', sa.Column('wordpress_metadata', sa.JSON(), nullable=False, server_default='{}')) + + if 'imported_user_ids' not in import_jobs_columns: + op.add_column('import_jobs', sa.Column('imported_user_ids', sa.JSON(), nullable=False, server_default='[]')) + + if 'rollback_at' not in import_jobs_columns: + op.add_column('import_jobs', sa.Column('rollback_at', sa.DateTime(), nullable=True)) + + if 'rollback_by' not in import_jobs_columns: + op.add_column('import_jobs', sa.Column('rollback_by', UUID(as_uuid=True), nullable=True)) + # Foreign key will be added if needed + + # ============================================================ + # 5. 
CREATE IMPORT_ROLLBACK_AUDIT TABLE + # ============================================================ + if 'import_rollback_audit' not in inspector.get_table_names(): + op.create_table( + 'import_rollback_audit', + sa.Column('id', UUID(as_uuid=True), primary_key=True), + sa.Column('import_job_id', UUID(as_uuid=True), sa.ForeignKey('import_jobs.id'), nullable=False), + sa.Column('rolled_back_by', UUID(as_uuid=True), sa.ForeignKey('users.id'), nullable=False), + sa.Column('rolled_back_at', sa.DateTime(), nullable=False), + sa.Column('deleted_user_count', sa.Integer(), nullable=False), + sa.Column('deleted_user_ids', sa.JSON(), nullable=False), + sa.Column('reason', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False) + ) + + +def downgrade() -> None: + """Remove all added columns and tables""" + + # Drop import_rollback_audit table + op.drop_table('import_rollback_audit') + + # Drop import_jobs columns + op.drop_column('import_jobs', 'rollback_by') + op.drop_column('import_jobs', 'rollback_at') + op.drop_column('import_jobs', 'imported_user_ids') + op.drop_column('import_jobs', 'wordpress_metadata') + op.drop_column('import_jobs', 'field_mapping') + + # Drop subscriptions columns + op.drop_column('subscriptions', 'manual_payment') + op.drop_column('subscriptions', 'donation_cents') + op.drop_column('subscriptions', 'base_subscription_cents') + + # Drop events columns + op.drop_column('events', 'calendar_uid') + + # Drop users columns (in reverse order) + op.drop_column('users', 'wordpress_registered_date') + op.drop_column('users', 'wordpress_user_id') + op.drop_column('users', 'import_job_id') + op.drop_column('users', 'import_source') + op.drop_column('users', 'rejected_by') + op.drop_column('users', 'rejected_at') + op.drop_column('users', 'rejection_reason') + op.drop_column('users', 'last_renewal_reminder_at') + op.drop_column('users', 'renewal_reminders_sent') + op.drop_column('users', 'last_payment_reminder_at') + 
op.drop_column('users', 'payment_reminders_sent') + op.drop_column('users', 'last_event_attendance_reminder_at') + op.drop_column('users', 'event_attendance_reminders_sent') + op.drop_column('users', 'last_email_verification_reminder_at') + op.drop_column('users', 'email_verification_reminders_sent') + op.drop_column('users', 'member_since') + op.drop_column('users', 'tos_accepted_at') + op.drop_column('users', 'accepts_tos') + op.drop_column('users', 'force_password_change') + op.drop_column('users', 'password_reset_expires') + op.drop_column('users', 'password_reset_token') + op.drop_column('users', 'show_in_directory') + op.drop_column('users', 'scholarship_requested') + op.drop_column('users', 'volunteer_interests') + op.drop_column('users', 'newsletter_publish_none') + op.drop_column('users', 'newsletter_publish_birthday') + op.drop_column('users', 'newsletter_publish_photo') + op.drop_column('users', 'newsletter_publish_name') diff --git a/alembic/versions/010_add_email_verification_expires.py b/alembic/versions/010_add_email_verification_expires.py new file mode 100644 index 0000000..a484e4b --- /dev/null +++ b/alembic/versions/010_add_email_verification_expires.py @@ -0,0 +1,37 @@ +"""add_email_verification_expires + +Revision ID: 010_add_email_exp +Revises: 009_add_all_missing +Create Date: 2026-01-05 + +Fixes: +- Add missing email_verification_expires column to users table +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '010_add_email_exp' +down_revision: Union[str, None] = '009_add_all_missing' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Add email_verification_expires column (skip if already exists)""" + from sqlalchemy import inspect + + conn = op.get_bind() + inspector = inspect(conn) + existing_columns = {col['name'] for col in inspector.get_columns('users')} + + # Add email_verification_expires if missing + if 'email_verification_expires' not in existing_columns: + op.add_column('users', sa.Column('email_verification_expires', sa.DateTime(), nullable=True)) + + +def downgrade() -> None: + """Remove email_verification_expires column""" + op.drop_column('users', 'email_verification_expires') diff --git a/alembic/versions/011_align_prod_with_dev.py b/alembic/versions/011_align_prod_with_dev.py new file mode 100644 index 0000000..42bc60f --- /dev/null +++ b/alembic/versions/011_align_prod_with_dev.py @@ -0,0 +1,410 @@ +"""align_prod_with_dev + +Revision ID: 011_align_prod_dev +Revises: 010_add_email_exp +Create Date: 2026-01-05 + +Aligns PROD database schema with DEV database schema (source of truth). +Fixes type mismatches, removes PROD-only columns, adds DEV-only columns, updates nullable constraints. +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import JSONB, JSON + +# revision identifiers, used by Alembic. 
+revision: str = '011_align_prod_dev' +down_revision: Union[str, None] = '010_add_email_exp' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Align PROD schema with DEV schema (source of truth)""" + from sqlalchemy import inspect + + conn = op.get_bind() + inspector = inspect(conn) + + print("Starting schema alignment: PROD → DEV (source of truth)...") + + # ============================================================ + # 1. FIX USERS TABLE + # ============================================================ + print("\n[1/14] Fixing users table...") + + users_columns = {col['name'] for col in inspector.get_columns('users')} + + # Remove PROD-only columns (not in models.py or DEV) + if 'bio' in users_columns: + op.drop_column('users', 'bio') + print(" ✓ Removed users.bio (PROD-only)") + + if 'interests' in users_columns: + op.drop_column('users', 'interests') + print(" ✓ Removed users.interests (PROD-only)") + + try: + # Change constrained VARCHAR(n) to unconstrained VARCHAR + op.alter_column('users', 'first_name', type_=sa.String(), postgresql_using='first_name::varchar') + op.alter_column('users', 'last_name', type_=sa.String(), postgresql_using='last_name::varchar') + op.alter_column('users', 'email', type_=sa.String(), postgresql_using='email::varchar') + op.alter_column('users', 'phone', type_=sa.String(), postgresql_using='phone::varchar') + op.alter_column('users', 'city', type_=sa.String(), postgresql_using='city::varchar') + op.alter_column('users', 'state', type_=sa.String(), postgresql_using='state::varchar') + op.alter_column('users', 'zipcode', type_=sa.String(), postgresql_using='zipcode::varchar') + op.alter_column('users', 'partner_first_name', type_=sa.String(), postgresql_using='partner_first_name::varchar') + op.alter_column('users', 'partner_last_name', type_=sa.String(), postgresql_using='partner_last_name::varchar') + op.alter_column('users', 
'referred_by_member_name', type_=sa.String(), postgresql_using='referred_by_member_name::varchar') + op.alter_column('users', 'password_hash', type_=sa.String(), postgresql_using='password_hash::varchar') + op.alter_column('users', 'email_verification_token', type_=sa.String(), postgresql_using='email_verification_token::varchar') + op.alter_column('users', 'password_reset_token', type_=sa.String(), postgresql_using='password_reset_token::varchar') + print(" ✓ Changed VARCHAR(n) to VARCHAR") + + # Change TEXT to VARCHAR + op.alter_column('users', 'address', type_=sa.String(), postgresql_using='address::varchar') + op.alter_column('users', 'profile_photo_url', type_=sa.String(), postgresql_using='profile_photo_url::varchar') + print(" ✓ Changed TEXT to VARCHAR") + + # Change DATE to TIMESTAMP + op.alter_column('users', 'date_of_birth', type_=sa.DateTime(), postgresql_using='date_of_birth::timestamp') + op.alter_column('users', 'member_since', type_=sa.DateTime(), postgresql_using='member_since::timestamp') + print(" ✓ Changed DATE to TIMESTAMP") + + # Change JSONB to JSON + op.alter_column('users', 'lead_sources', type_=JSON(), postgresql_using='lead_sources::json') + print(" ✓ Changed lead_sources JSONB to JSON") + + # Change TEXT to JSON for volunteer_interests + op.alter_column('users', 'volunteer_interests', type_=JSON(), postgresql_using='volunteer_interests::json') + print(" ✓ Changed volunteer_interests TEXT to JSON") + + except Exception as e: + print(f" ⚠️ Warning: Some type conversions failed: {e}") + + # Fill NULL values with defaults BEFORE setting NOT NULL constraints + print(" ⏳ Filling NULL values with defaults...") + + # Update string fields + conn.execute(sa.text("UPDATE users SET address = '' WHERE address IS NULL")) + conn.execute(sa.text("UPDATE users SET city = '' WHERE city IS NULL")) + conn.execute(sa.text("UPDATE users SET state = '' WHERE state IS NULL")) + conn.execute(sa.text("UPDATE users SET zipcode = '' WHERE zipcode IS NULL")) + 
conn.execute(sa.text("UPDATE users SET phone = '' WHERE phone IS NULL")) + + # Update date_of_birth with sentinel date + conn.execute(sa.text("UPDATE users SET date_of_birth = '1900-01-01'::timestamp WHERE date_of_birth IS NULL")) + + # Update boolean fields + conn.execute(sa.text("UPDATE users SET show_in_directory = false WHERE show_in_directory IS NULL")) + conn.execute(sa.text("UPDATE users SET newsletter_publish_name = false WHERE newsletter_publish_name IS NULL")) + conn.execute(sa.text("UPDATE users SET newsletter_publish_birthday = false WHERE newsletter_publish_birthday IS NULL")) + conn.execute(sa.text("UPDATE users SET newsletter_publish_photo = false WHERE newsletter_publish_photo IS NULL")) + conn.execute(sa.text("UPDATE users SET newsletter_publish_none = false WHERE newsletter_publish_none IS NULL")) + conn.execute(sa.text("UPDATE users SET force_password_change = false WHERE force_password_change IS NULL")) + conn.execute(sa.text("UPDATE users SET scholarship_requested = false WHERE scholarship_requested IS NULL")) + conn.execute(sa.text("UPDATE users SET accepts_tos = false WHERE accepts_tos IS NULL")) + + # Check how many rows were updated + null_check = conn.execute(sa.text(""" + SELECT + COUNT(*) FILTER (WHERE address = '') as address_filled, + COUNT(*) FILTER (WHERE date_of_birth = '1900-01-01'::timestamp) as dob_filled + FROM users + """)).fetchone() + print(f" ✓ Filled NULLs: {null_check[0]} addresses, {null_check[1]} dates of birth") + + # Now safe to set NOT NULL constraints + op.alter_column('users', 'address', nullable=False) + op.alter_column('users', 'city', nullable=False) + op.alter_column('users', 'state', nullable=False) + op.alter_column('users', 'zipcode', nullable=False) + op.alter_column('users', 'phone', nullable=False) + op.alter_column('users', 'date_of_birth', nullable=False) + op.alter_column('users', 'show_in_directory', nullable=False) + op.alter_column('users', 'newsletter_publish_name', nullable=False) + 
op.alter_column('users', 'newsletter_publish_birthday', nullable=False) + op.alter_column('users', 'newsletter_publish_photo', nullable=False) + op.alter_column('users', 'newsletter_publish_none', nullable=False) + op.alter_column('users', 'force_password_change', nullable=False) + op.alter_column('users', 'scholarship_requested', nullable=False) + op.alter_column('users', 'accepts_tos', nullable=False) + print(" ✓ Set NOT NULL constraints") + + # ============================================================ + # 2. FIX DONATIONS TABLE + # ============================================================ + print("\n[2/14] Fixing donations table...") + + donations_columns = {col['name'] for col in inspector.get_columns('donations')} + + # Remove PROD-only columns + if 'is_anonymous' in donations_columns: + op.drop_column('donations', 'is_anonymous') + print(" ✓ Removed donations.is_anonymous (PROD-only)") + + if 'completed_at' in donations_columns: + op.drop_column('donations', 'completed_at') + print(" ✓ Removed donations.completed_at (PROD-only)") + + if 'message' in donations_columns: + op.drop_column('donations', 'message') + print(" ✓ Removed donations.message (PROD-only)") + + try: + op.alter_column('donations', 'donor_email', type_=sa.String(), postgresql_using='donor_email::varchar') + op.alter_column('donations', 'donor_name', type_=sa.String(), postgresql_using='donor_name::varchar') + op.alter_column('donations', 'stripe_payment_intent_id', type_=sa.String(), postgresql_using='stripe_payment_intent_id::varchar') + print(" ✓ Changed VARCHAR(n) to VARCHAR") + except Exception as e: + print(f" ⚠️ Warning: Type conversion failed: {e}") + + # ============================================================ + # 3. 
FIX SUBSCRIPTIONS TABLE + # ============================================================ + print("\n[3/14] Fixing subscriptions table...") + + subscriptions_columns = {col['name'] for col in inspector.get_columns('subscriptions')} + + # Remove PROD-only columns + if 'cancel_at_period_end' in subscriptions_columns: + op.drop_column('subscriptions', 'cancel_at_period_end') + print(" ✓ Removed subscriptions.cancel_at_period_end (PROD-only)") + + if 'canceled_at' in subscriptions_columns: + op.drop_column('subscriptions', 'canceled_at') + print(" ✓ Removed subscriptions.canceled_at (PROD-only)") + + if 'current_period_start' in subscriptions_columns: + op.drop_column('subscriptions', 'current_period_start') + print(" ✓ Removed subscriptions.current_period_start (PROD-only)") + + if 'current_period_end' in subscriptions_columns: + op.drop_column('subscriptions', 'current_period_end') + print(" ✓ Removed subscriptions.current_period_end (PROD-only)") + + try: + op.alter_column('subscriptions', 'stripe_subscription_id', type_=sa.String(), postgresql_using='stripe_subscription_id::varchar') + op.alter_column('subscriptions', 'stripe_customer_id', type_=sa.String(), postgresql_using='stripe_customer_id::varchar') + op.alter_column('subscriptions', 'payment_method', type_=sa.String(), postgresql_using='payment_method::varchar') + print(" ✓ Changed VARCHAR(n) to VARCHAR") + except Exception as e: + print(f" ⚠️ Warning: Type conversion failed: {e}") + + # Fix nullable constraints + op.alter_column('subscriptions', 'start_date', nullable=False) + op.alter_column('subscriptions', 'manual_payment', nullable=False) + op.alter_column('subscriptions', 'donation_cents', nullable=False) + op.alter_column('subscriptions', 'base_subscription_cents', nullable=False) + print(" ✓ Fixed nullable constraints") + + # ============================================================ + # 4. 
FIX STORAGE_USAGE TABLE + # ============================================================ + print("\n[4/14] Fixing storage_usage table...") + + storage_columns = {col['name'] for col in inspector.get_columns('storage_usage')} + + # Remove PROD-only columns + if 'created_at' in storage_columns: + op.drop_column('storage_usage', 'created_at') + print(" ✓ Removed storage_usage.created_at (PROD-only)") + + if 'updated_at' in storage_columns: + op.drop_column('storage_usage', 'updated_at') + print(" ✓ Removed storage_usage.updated_at (PROD-only)") + + op.alter_column('storage_usage', 'max_bytes_allowed', nullable=False) + print(" ✓ Fixed nullable constraint") + + # ============================================================ + # 5. FIX EVENT_GALLERIES TABLE (Add missing DEV columns) + # ============================================================ + print("\n[5/14] Fixing event_galleries table...") + + event_galleries_columns = {col['name'] for col in inspector.get_columns('event_galleries')} + + # Add DEV-only columns (exist in models.py but not in PROD) + if 'image_key' not in event_galleries_columns: + op.add_column('event_galleries', sa.Column('image_key', sa.String(), nullable=False, server_default='')) + print(" ✓ Added event_galleries.image_key") + + if 'file_size_bytes' not in event_galleries_columns: + op.add_column('event_galleries', sa.Column('file_size_bytes', sa.Integer(), nullable=False, server_default='0')) + print(" ✓ Added event_galleries.file_size_bytes") + + try: + op.alter_column('event_galleries', 'image_url', type_=sa.String(), postgresql_using='image_url::varchar') + print(" ✓ Changed TEXT to VARCHAR") + except Exception as e: + print(f" ⚠️ Warning: Type conversion failed: {e}") + + # Note: uploaded_by column already has correct nullable=False in both DEV and PROD + + # ============================================================ + # 6. 
FIX BYLAWS_DOCUMENTS TABLE + # ============================================================ + print("\n[6/14] Fixing bylaws_documents table...") + + bylaws_columns = {col['name'] for col in inspector.get_columns('bylaws_documents')} + + # Remove PROD-only column + if 'updated_at' in bylaws_columns: + op.drop_column('bylaws_documents', 'updated_at') + print(" ✓ Removed bylaws_documents.updated_at (PROD-only)") + + try: + op.alter_column('bylaws_documents', 'title', type_=sa.String(), postgresql_using='title::varchar') + op.alter_column('bylaws_documents', 'version', type_=sa.String(), postgresql_using='version::varchar') + op.alter_column('bylaws_documents', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar') + op.alter_column('bylaws_documents', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar') + print(" ✓ Changed column types") + except Exception as e: + print(f" ⚠️ Warning: Type conversion failed: {e}") + + op.alter_column('bylaws_documents', 'document_type', nullable=True) + print(" ✓ Fixed nullable constraint") + + # ============================================================ + # 7. FIX EVENTS TABLE + # ============================================================ + print("\n[7/14] Fixing events table...") + + try: + op.alter_column('events', 'title', type_=sa.String(), postgresql_using='title::varchar') + op.alter_column('events', 'location', type_=sa.String(), postgresql_using='location::varchar') + op.alter_column('events', 'calendar_uid', type_=sa.String(), postgresql_using='calendar_uid::varchar') + print(" ✓ Changed VARCHAR(n) to VARCHAR") + except Exception as e: + print(f" ⚠️ Warning: {e}") + + op.alter_column('events', 'location', nullable=False) + op.alter_column('events', 'created_by', nullable=False) + print(" ✓ Fixed nullable constraints") + + # ============================================================ + # 8. 
FIX PERMISSIONS TABLE + # ============================================================ + print("\n[8/14] Fixing permissions table...") + + try: + op.alter_column('permissions', 'code', type_=sa.String(), postgresql_using='code::varchar') + op.alter_column('permissions', 'name', type_=sa.String(), postgresql_using='name::varchar') + op.alter_column('permissions', 'module', type_=sa.String(), postgresql_using='module::varchar') + print(" ✓ Changed VARCHAR(n) to VARCHAR") + except Exception as e: + print(f" ⚠️ Warning: {e}") + + op.alter_column('permissions', 'module', nullable=False) + print(" ✓ Fixed nullable constraint") + + # ============================================================ + # 9. FIX ROLES TABLE + # ============================================================ + print("\n[9/14] Fixing roles table...") + + try: + op.alter_column('roles', 'code', type_=sa.String(), postgresql_using='code::varchar') + op.alter_column('roles', 'name', type_=sa.String(), postgresql_using='name::varchar') + print(" ✓ Changed VARCHAR(n) to VARCHAR") + except Exception as e: + print(f" ⚠️ Warning: {e}") + + op.alter_column('roles', 'is_system_role', nullable=False) + print(" ✓ Fixed nullable constraint") + + # ============================================================ + # 10. FIX USER_INVITATIONS TABLE + # ============================================================ + print("\n[10/14] Fixing user_invitations table...") + + try: + op.alter_column('user_invitations', 'email', type_=sa.String(), postgresql_using='email::varchar') + op.alter_column('user_invitations', 'token', type_=sa.String(), postgresql_using='token::varchar') + print(" ✓ Changed VARCHAR(n) to VARCHAR") + except Exception as e: + print(f" ⚠️ Warning: {e}") + + op.alter_column('user_invitations', 'invited_at', nullable=False) + print(" ✓ Fixed nullable constraint") + + # ============================================================ + # 11. 
FIX NEWSLETTER_ARCHIVES TABLE + # ============================================================ + print("\n[11/14] Fixing newsletter_archives table...") + + try: + op.alter_column('newsletter_archives', 'title', type_=sa.String(), postgresql_using='title::varchar') + op.alter_column('newsletter_archives', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar') + op.alter_column('newsletter_archives', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar') + print(" ✓ Changed column types") + except Exception as e: + print(f" ⚠️ Warning: {e}") + + op.alter_column('newsletter_archives', 'document_type', nullable=True) + print(" ✓ Fixed nullable constraint") + + # ============================================================ + # 12. FIX FINANCIAL_REPORTS TABLE + # ============================================================ + print("\n[12/14] Fixing financial_reports table...") + + try: + op.alter_column('financial_reports', 'title', type_=sa.String(), postgresql_using='title::varchar') + op.alter_column('financial_reports', 'document_url', type_=sa.String(), postgresql_using='document_url::varchar') + op.alter_column('financial_reports', 'document_type', type_=sa.String(), postgresql_using='document_type::varchar') + print(" ✓ Changed column types") + except Exception as e: + print(f" ⚠️ Warning: {e}") + + op.alter_column('financial_reports', 'document_type', nullable=True) + print(" ✓ Fixed nullable constraint") + + # ============================================================ + # 13. 
FIX IMPORT_JOBS TABLE + # ============================================================ + print("\n[13/14] Fixing import_jobs table...") + + try: + op.alter_column('import_jobs', 'filename', type_=sa.String(), postgresql_using='filename::varchar') + op.alter_column('import_jobs', 'file_key', type_=sa.String(), postgresql_using='file_key::varchar') + print(" ✓ Changed VARCHAR(n) to VARCHAR") + + # Change JSONB to JSON + op.alter_column('import_jobs', 'errors', type_=JSON(), postgresql_using='errors::json') + print(" ✓ Changed errors JSONB to JSON") + except Exception as e: + print(f" ⚠️ Warning: {e}") + + # Fix nullable constraints + op.alter_column('import_jobs', 'processed_rows', nullable=False) + op.alter_column('import_jobs', 'successful_rows', nullable=False) + op.alter_column('import_jobs', 'failed_rows', nullable=False) + op.alter_column('import_jobs', 'errors', nullable=False) + op.alter_column('import_jobs', 'started_at', nullable=False) + print(" ✓ Fixed nullable constraints") + + # ============================================================ + # 14. FIX SUBSCRIPTION_PLANS TABLE + # ============================================================ + print("\n[14/14] Fixing subscription_plans table...") + + try: + op.alter_column('subscription_plans', 'name', type_=sa.String(), postgresql_using='name::varchar') + op.alter_column('subscription_plans', 'billing_cycle', type_=sa.String(), postgresql_using='billing_cycle::varchar') + op.alter_column('subscription_plans', 'stripe_price_id', type_=sa.String(), postgresql_using='stripe_price_id::varchar') + print(" ✓ Changed VARCHAR(n) to VARCHAR") + except Exception as e: + print(f" ⚠️ Warning: {e}") + + op.alter_column('subscription_plans', 'minimum_price_cents', nullable=False) + print(" ✓ Fixed nullable constraint") + + print("\n✅ Schema alignment complete! 
PROD now matches DEV (source of truth)") + + +def downgrade() -> None: + """Revert alignment changes (not recommended)""" + print("⚠️ Downgrade not supported for alignment migration") + print(" To revert, restore from backup") + pass diff --git a/alembic/versions/012_fix_remaining_differences.py b/alembic/versions/012_fix_remaining_differences.py new file mode 100644 index 0000000..62245ab --- /dev/null +++ b/alembic/versions/012_fix_remaining_differences.py @@ -0,0 +1,170 @@ +"""fix_remaining_differences + +Revision ID: 012_fix_remaining +Revises: 011_align_prod_dev +Create Date: 2026-01-05 + +Fixes the last 5 schema differences found after migration 011: +1-2. import_rollback_audit nullable constraints (PROD) +3-4. role_permissions type and nullable (PROD) +5. UserStatus enum values (DEV - remove deprecated values) +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import ENUM + +# revision identifiers, used by Alembic. +revision: str = '012_fix_remaining' +down_revision: Union[str, None] = '011_align_prod_dev' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Fix remaining schema differences""" + from sqlalchemy import inspect + + conn = op.get_bind() + inspector = inspect(conn) + + print("Fixing remaining schema differences...") + + # ============================================================ + # 1. 
FIX IMPORT_ROLLBACK_AUDIT TABLE (PROD only) + # ============================================================ + print("\n[1/3] Fixing import_rollback_audit nullable constraints...") + + # Check if there are any NULL values first + try: + null_count = conn.execute(sa.text(""" + SELECT COUNT(*) FROM import_rollback_audit + WHERE created_at IS NULL OR rolled_back_at IS NULL + """)).scalar() + + if null_count > 0: + # Fill NULLs with current timestamp + conn.execute(sa.text(""" + UPDATE import_rollback_audit + SET created_at = NOW() WHERE created_at IS NULL + """)) + conn.execute(sa.text(""" + UPDATE import_rollback_audit + SET rolled_back_at = NOW() WHERE rolled_back_at IS NULL + """)) + print(f" ✓ Filled {null_count} NULL timestamps") + + # Now set NOT NULL + op.alter_column('import_rollback_audit', 'created_at', nullable=False) + op.alter_column('import_rollback_audit', 'rolled_back_at', nullable=False) + print(" ✓ Set NOT NULL constraints") + except Exception as e: + print(f" ⚠️ Warning: {e}") + + # ============================================================ + # 2. FIX ROLE_PERMISSIONS TABLE (PROD only) + # ============================================================ + print("\n[2/3] Fixing role_permissions.role type and nullable...") + + try: + # Change VARCHAR(50) to VARCHAR(10) to match UserRole enum + op.alter_column('role_permissions', 'role', + type_=sa.String(10), + postgresql_using='role::varchar(10)') + print(" ✓ Changed VARCHAR(50) to VARCHAR(10)") + + # Set NOT NULL + op.alter_column('role_permissions', 'role', nullable=False) + print(" ✓ Set NOT NULL constraint") + except Exception as e: + print(f" ⚠️ Warning: {e}") + + # ============================================================ + # 3. 
FIX USERSTATUS ENUM (DEV only - remove deprecated values) + # ============================================================ + print("\n[3/3] Fixing UserStatus enum values...") + + try: + # First, check if the enum has deprecated values + enum_values = conn.execute(sa.text(""" + SELECT enumlabel + FROM pg_enum + WHERE enumtypid = ( + SELECT oid FROM pg_type WHERE typname = 'userstatus' + ) + """)).fetchall() + + enum_values_list = [row[0] for row in enum_values] + has_deprecated = 'pending_approval' in enum_values_list or 'pre_approved' in enum_values_list + + if not has_deprecated: + print(" ✓ UserStatus enum already correct (no deprecated values)") + else: + print(" ⏳ Found deprecated enum values, migrating...") + + # Check if any users have deprecated status values + deprecated_count = conn.execute(sa.text(""" + SELECT COUNT(*) FROM users + WHERE status IN ('pending_approval', 'pre_approved') + """)).scalar() + + if deprecated_count > 0: + print(f" ⏳ Migrating {deprecated_count} users with deprecated status values...") + + # Migrate deprecated values to new equivalents + conn.execute(sa.text(""" + UPDATE users + SET status = 'pre_validated' + WHERE status = 'pre_approved' + """)) + + conn.execute(sa.text(""" + UPDATE users + SET status = 'payment_pending' + WHERE status = 'pending_approval' + """)) + + print(" ✓ Migrated deprecated status values") + else: + print(" ✓ No users with deprecated status values") + + # Now remove deprecated enum values + # PostgreSQL doesn't support removing enum values directly, + # so we need to recreate the enum + conn.execute(sa.text(""" + -- Create new enum with correct values (matches models.py) + CREATE TYPE userstatus_new AS ENUM ( + 'pending_email', + 'pending_validation', + 'pre_validated', + 'payment_pending', + 'active', + 'inactive', + 'canceled', + 'expired', + 'rejected', + 'abandoned' + ); + + -- Update column to use new enum + ALTER TABLE users + ALTER COLUMN status TYPE userstatus_new + USING 
status::text::userstatus_new; + + -- Drop old enum and rename new one + DROP TYPE userstatus; + ALTER TYPE userstatus_new RENAME TO userstatus; + """)) + + print(" ✓ Updated UserStatus enum (removed deprecated values)") + + except Exception as e: + print(f" ⚠️ Warning: Enum update failed (may already be correct): {e}") + + print("\n✅ All remaining differences fixed!") + + +def downgrade() -> None: + """Revert fixes (not recommended)""" + print("⚠️ Downgrade not supported") + pass diff --git a/alembic/versions/013_sync_role_permissions.py b/alembic/versions/013_sync_role_permissions.py new file mode 100644 index 0000000..64ce6cc --- /dev/null +++ b/alembic/versions/013_sync_role_permissions.py @@ -0,0 +1,147 @@ +"""sync_role_permissions + +Revision ID: 013_sync_permissions +Revises: 012_fix_remaining +Create Date: 2026-01-05 + +Syncs role_permissions between DEV and PROD bidirectionally. +- Adds 18 DEV-only permissions to PROD (new features) +- Adds 6 PROD-only permissions to DEV (operational/security) +Result: Both environments have identical 142 permission mappings +""" +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
revision: str = '013_sync_permissions'
down_revision: Union[str, None] = '012_fix_remaining'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Sync role_permissions bidirectionally.

    Ensures the union of DEV and PROD permissions exists, then adds any
    missing role -> permission mappings. Idempotent: every insert is guarded
    by a NOT EXISTS check, so re-running the migration is safe.
    """
    from sqlalchemy import text

    conn = op.get_bind()

    print("Syncing role_permissions between environments...")

    # ------------------------------------------------------------------
    # STEP 1: ensure every permission (union of both environments) exists
    # ------------------------------------------------------------------
    print("\n[1/2] Ensuring all permissions exist...")

    # (code, display name, module) tuples; description reuses the name.
    all_permissions = [
        # From DEV-only list
        ('donations.export', 'Export Donations', 'donations'),
        ('donations.view', 'View Donations', 'donations'),
        ('financials.create', 'Create Financial Reports', 'financials'),
        ('financials.delete', 'Delete Financial Reports', 'financials'),
        ('financials.edit', 'Edit Financial Reports', 'financials'),
        ('financials.export', 'Export Financial Reports', 'financials'),
        ('financials.payments', 'Manage Financial Payments', 'financials'),
        ('settings.edit', 'Edit Settings', 'settings'),
        ('settings.email_templates', 'Manage Email Templates', 'settings'),
        ('subscriptions.activate', 'Activate Subscriptions', 'subscriptions'),
        ('subscriptions.cancel', 'Cancel Subscriptions', 'subscriptions'),
        ('subscriptions.create', 'Create Subscriptions', 'subscriptions'),
        ('subscriptions.edit', 'Edit Subscriptions', 'subscriptions'),
        ('subscriptions.export', 'Export Subscriptions', 'subscriptions'),
        ('subscriptions.plans', 'Manage Subscription Plans', 'subscriptions'),
        ('subscriptions.view', 'View Subscriptions', 'subscriptions'),
        ('events.calendar_export', 'Export Event Calendar', 'events'),
        ('events.rsvps', 'View Event RSVPs', 'events'),
        # From PROD-only list
        ('permissions.audit', 'Audit Permissions', 'permissions'),
        ('permissions.view', 'View Permissions', 'permissions'),
        ('settings.backup', 'Manage Backups', 'settings'),
    ]

    # Bound parameters (not f-string interpolation) so values containing
    # quotes can never break or inject into the statement.
    insert_permission = text("""
        INSERT INTO permissions (id, code, name, description, module, created_at)
        SELECT gen_random_uuid(), :code, :name, :name, :module, NOW()
        WHERE NOT EXISTS (
            SELECT 1 FROM permissions WHERE code = :code
        )
    """)
    for code, name, module in all_permissions:
        conn.execute(insert_permission, {"code": code, "name": name, "module": module})

    print("  ✓ Ensured all permissions exist")

    # ------------------------------------------------------------------
    # STEP 2: add missing role-permission mappings
    # ------------------------------------------------------------------
    print("\n[2/2] Adding missing role-permission mappings...")

    # (role, permission code) pairs that should exist in both environments.
    role_permission_mappings = [
        # DEV-only (add to PROD)
        ('admin', 'donations.export'),
        ('admin', 'donations.view'),
        ('admin', 'financials.create'),
        ('admin', 'financials.delete'),
        ('admin', 'financials.edit'),
        ('admin', 'financials.export'),
        ('admin', 'financials.payments'),
        ('admin', 'settings.edit'),
        ('admin', 'settings.email_templates'),
        ('admin', 'subscriptions.activate'),
        ('admin', 'subscriptions.cancel'),
        ('admin', 'subscriptions.create'),
        ('admin', 'subscriptions.edit'),
        ('admin', 'subscriptions.export'),
        ('admin', 'subscriptions.plans'),
        ('admin', 'subscriptions.view'),
        ('member', 'events.calendar_export'),
        ('member', 'events.rsvps'),
        # PROD-only (add to DEV)
        ('admin', 'permissions.audit'),
        ('admin', 'permissions.view'),
        ('admin', 'settings.backup'),
        ('finance', 'bylaws.view'),
        ('finance', 'events.view'),
        ('finance', 'newsletters.view'),
    ]

    insert_mapping = text("""
        INSERT INTO role_permissions (id, role, permission_id, created_at)
        SELECT gen_random_uuid(), :role, p.id, NOW()
        FROM permissions p
        WHERE p.code = :code
          AND NOT EXISTS (
              SELECT 1 FROM role_permissions rp
              WHERE rp.role = :role
                AND rp.permission_id = p.id
          )
        RETURNING id
    """)
    added_count = 0
    for role, perm_code in role_permission_mappings:
        result = conn.execute(insert_mapping, {"role": role, "code": perm_code})
        # rowcount is 1 when the guarded INSERT actually fired, 0 otherwise.
        if result.rowcount > 0:
            added_count += 1

    print(f"  ✓ Added {added_count} missing role-permission mappings")

    # Report the final total for operator verification.
    final_count = conn.execute(text("SELECT COUNT(*) FROM role_permissions")).scalar()
    print(f"\n✅ Role-permission mappings synchronized: {final_count} total")


def downgrade() -> None:
    """Revert sync (not recommended) — permissions are additive, so no-op."""
    print("⚠️  Downgrade not supported - permissions are additive")
USERS TABLE - Expected: 60+ columns'
\echo 'Checking for specific columns:'

-- NOTE: every lookup is restricted to table_schema = 'public' so that
-- identically named tables in other schemas cannot produce false positives.
SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'users' AND column_name = 'newsletter_publish_name') THEN '✓' ELSE '✗' END || ' newsletter_publish_name',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'users' AND column_name = 'volunteer_interests') THEN '✓' ELSE '✗' END || ' volunteer_interests',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'users' AND column_name = 'scholarship_requested') THEN '✓' ELSE '✗' END || ' scholarship_requested',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'users' AND column_name = 'show_in_directory') THEN '✓' ELSE '✗' END || ' show_in_directory',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'users' AND column_name = 'password_reset_token') THEN '✓' ELSE '✗' END || ' password_reset_token',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'users' AND column_name = 'accepts_tos') THEN '✓' ELSE '✗' END || ' accepts_tos',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'users' AND column_name = 'member_since') THEN '✓' ELSE '✗' END || ' member_since',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'users' AND column_name = 'rejection_reason') THEN '✓' ELSE '✗' END || ' rejection_reason',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'users' AND column_name = 'import_source') THEN '✓' ELSE '✗' END || ' import_source'
\gx

-- ============================================================
-- 2. EVENTS TABLE
-- ============================================================
\echo ''
\echo '2. EVENTS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'events' AND column_name = 'calendar_uid') THEN '✓' ELSE '✗' END || ' calendar_uid';

-- ============================================================
-- 3. SUBSCRIPTIONS TABLE
-- ============================================================
\echo ''
\echo '3. SUBSCRIPTIONS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'subscriptions' AND column_name = 'base_subscription_cents') THEN '✓' ELSE '✗' END || ' base_subscription_cents',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'subscriptions' AND column_name = 'donation_cents') THEN '✓' ELSE '✗' END || ' donation_cents',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'subscriptions' AND column_name = 'manual_payment') THEN '✓' ELSE '✗' END || ' manual_payment'
\gx

-- ============================================================
-- 4. IMPORT_JOBS TABLE
-- ============================================================
\echo ''
\echo '4. IMPORT_JOBS TABLE'

SELECT
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'import_jobs' AND column_name = 'field_mapping') THEN '✓' ELSE '✗' END || ' field_mapping',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'import_jobs' AND column_name = 'wordpress_metadata') THEN '✓' ELSE '✗' END || ' wordpress_metadata',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'import_jobs' AND column_name = 'imported_user_ids') THEN '✓' ELSE '✗' END || ' imported_user_ids',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'import_jobs' AND column_name = 'rollback_at') THEN '✓' ELSE '✗' END || ' rollback_at',
    CASE WHEN EXISTS (SELECT 1 FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'import_jobs' AND column_name = 'rollback_by') THEN '✓' ELSE '✗' END || ' rollback_by'
\gx

-- ============================================================
-- 5. CHECK IF IMPORT_ROLLBACK_AUDIT TABLE EXISTS
-- ============================================================
\echo ''
\echo '5. 
#!/usr/bin/env python3
"""
Database Integrity Checker.

Compares schema and data integrity between the development and production
databases: tables, columns, enum types, Alembic migration version, and row
counts. Exits 0 when the databases are in sync, 1 otherwise.
"""

import os
import sys
from sqlalchemy import create_engine, inspect, text
from sqlalchemy.engine import reflection
import json
from collections import defaultdict

# Connection URLs. SECURITY NOTE(review): credentials were previously
# hard-coded in this file and committed to the repository — they should be
# rotated. Prefer supplying them via the environment variables below; the
# hard-coded values remain only as a backward-compatible fallback.
DEV_DB = os.environ.get(
    "DEV_DATABASE_URL",
    "postgresql://postgres:RchhcpaUKZuZuMOvB5kwCP1weLBnAG6tNMXE5FHdk8AwCvolBMALYFVYRM7WCl9x@10.9.23.11:5001/membership_demo",
)
PROD_DB = os.environ.get(
    "PROD_DATABASE_URL",
    "postgresql://postgres:fDv3fRvMgfPueDWDUxj27NJVaynsewIdh6b2Hb28tcvG3Ew6mhscASg2kulx4tr7@10.9.23.11:54321/loaf_new",
)

def get_db_info(engine, label):
    """Collect tables, columns, PKs, indexes, FKs, sequences and enums.

    Returns a plain-dict snapshot of the schema so the compare_* helpers can
    diff two databases without holding connections open.
    """
    inspector = inspect(engine)

    info = {
        'label': label,
        'tables': {},
        'indexes': {},
        'foreign_keys': {},
        'sequences': [],
        'enums': []
    }

    for table_name in inspector.get_table_names():
        # Columns, normalised to plain strings for easy comparison.
        columns = inspector.get_columns(table_name)
        info['tables'][table_name] = {
            'columns': {
                col['name']: {
                    'type': str(col['type']),
                    'nullable': col['nullable'],
                    'default': str(col.get('default', None)),
                    'autoincrement': col.get('autoincrement', False)
                }
                for col in columns
            },
            'column_count': len(columns)
        }

        # Primary key
        pk = inspector.get_pk_constraint(table_name)
        info['tables'][table_name]['primary_key'] = pk.get('constrained_columns', [])

        # Indexes
        info['indexes'][table_name] = [
            {
                'name': idx['name'],
                'columns': idx['column_names'],
                'unique': idx['unique']
            }
            for idx in inspector.get_indexes(table_name)
        ]

        # Foreign keys
        info['foreign_keys'][table_name] = [
            {
                'name': fk.get('name'),
                'columns': fk['constrained_columns'],
                'referred_table': fk['referred_table'],
                'referred_columns': fk['referred_columns']
            }
            for fk in inspector.get_foreign_keys(table_name)
        ]

    with engine.connect() as conn:
        # Sequences in the public schema
        result = conn.execute(text("""
            SELECT sequence_name
            FROM information_schema.sequences
            WHERE sequence_schema = 'public'
        """))
        info['sequences'] = [row[0] for row in result]

        # Enum types and their ordered values
        result = conn.execute(text("""
            SELECT t.typname as enum_name,
                   array_agg(e.enumlabel ORDER BY e.enumsortorder) as enum_values
            FROM pg_type t
            JOIN pg_enum e ON t.oid = e.enumtypid
            WHERE t.typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'public')
            GROUP BY t.typname
        """))
        info['enums'] = {row[0]: row[1] for row in result}

    return info

def compare_tables(dev_info, prod_info):
    """Print tables present in only one database; return the common set."""
    dev_tables = set(dev_info['tables'].keys())
    prod_tables = set(prod_info['tables'].keys())

    print("\n" + "="*80)
    print("TABLE COMPARISON")
    print("="*80)

    dev_only = dev_tables - prod_tables
    if dev_only:
        print(f"\n❌ Tables only in DEV ({len(dev_only)}):")
        for table in sorted(dev_only):
            print(f"  - {table}")

    prod_only = prod_tables - dev_tables
    if prod_only:
        print(f"\n❌ Tables only in PROD ({len(prod_only)}):")
        for table in sorted(prod_only):
            print(f"  - {table}")

    common = dev_tables & prod_tables
    print(f"\n✅ Common tables: {len(common)}")

    return common

def compare_columns(dev_info, prod_info, common_tables):
    """Diff columns (presence, type, nullability) for the common tables."""
    print("\n" + "="*80)
    print("COLUMN COMPARISON")
    print("="*80)

    issues = []

    for table in sorted(common_tables):
        dev_cols = set(dev_info['tables'][table]['columns'].keys())
        prod_cols = set(prod_info['tables'][table]['columns'].keys())

        dev_only = dev_cols - prod_cols
        prod_only = prod_cols - dev_cols

        if dev_only or prod_only:
            print(f"\n⚠️  Table '{table}' has column differences:")

            if dev_only:
                print(f"  Columns only in DEV: {', '.join(sorted(dev_only))}")
                issues.append(f"{table}: DEV-only columns: {', '.join(dev_only)}")

            if prod_only:
                print(f"  Columns only in PROD: {', '.join(sorted(prod_only))}")
                issues.append(f"{table}: PROD-only columns: {', '.join(prod_only)}")

        # Type / nullability mismatches on columns both sides share.
        for col in dev_cols & prod_cols:
            dev_col = dev_info['tables'][table]['columns'][col]
            prod_col = prod_info['tables'][table]['columns'][col]

            if dev_col['type'] != prod_col['type']:
                print(f"  ⚠️  Column '{col}' type mismatch:")
                print(f"      DEV:  {dev_col['type']}")
                print(f"      PROD: {prod_col['type']}")
                issues.append(f"{table}.{col}: Type mismatch")

            if dev_col['nullable'] != prod_col['nullable']:
                print(f"  ⚠️  Column '{col}' nullable mismatch:")
                print(f"      DEV:  {dev_col['nullable']}")
                print(f"      PROD: {prod_col['nullable']}")
                issues.append(f"{table}.{col}: Nullable mismatch")

    if not issues:
        print("\n✅ All columns match between DEV and PROD")

    return issues

def compare_enums(dev_info, prod_info):
    """Diff enum type names and their value sets between the databases."""
    print("\n" + "="*80)
    print("ENUM TYPE COMPARISON")
    print("="*80)

    dev_enums = set(dev_info['enums'].keys())
    prod_enums = set(prod_info['enums'].keys())

    dev_only = dev_enums - prod_enums
    prod_only = prod_enums - dev_enums

    issues = []

    if dev_only:
        print(f"\n❌ Enums only in DEV: {', '.join(sorted(dev_only))}")
        issues.extend([f"Enum '{e}' only in DEV" for e in dev_only])

    if prod_only:
        print(f"\n❌ Enums only in PROD: {', '.join(sorted(prod_only))}")
        issues.extend([f"Enum '{e}' only in PROD" for e in prod_only])

    for enum_name in sorted(dev_enums & prod_enums):
        dev_values = set(dev_info['enums'][enum_name])
        prod_values = set(prod_info['enums'][enum_name])

        if dev_values != prod_values:
            print(f"\n⚠️  Enum '{enum_name}' values differ:")
            print(f"  DEV:  {', '.join(sorted(dev_values))}")
            print(f"  PROD: {', '.join(sorted(prod_values))}")
            issues.append(f"Enum '{enum_name}' values differ")

    if not issues:
        print("\n✅ All enum types match")

    return issues

def check_migration_history(dev_engine, prod_engine):
    """Compare the alembic_version stamp of both databases."""
    print("\n" + "="*80)
    print("MIGRATION HISTORY")
    print("="*80)

    try:
        with dev_engine.connect() as dev_conn:
            dev_version = dev_conn.execute(text("SELECT version_num FROM alembic_version")).fetchone()
            dev_version = dev_version[0] if dev_version else None

        with prod_engine.connect() as prod_conn:
            prod_version = prod_conn.execute(text("SELECT version_num FROM alembic_version")).fetchone()
            prod_version = prod_version[0] if prod_version else None

        print(f"\nDEV migration version:  {dev_version}")
        print(f"PROD migration version: {prod_version}")

        if dev_version == prod_version:
            print("✅ Migration versions match")
            return []
        else:
            print("❌ Migration versions DO NOT match")
            return ["Migration versions differ"]

    except Exception as e:
        # Either database may predate Alembic; report rather than crash.
        print(f"⚠️  Could not check migration history: {str(e)}")
        return [f"Migration check failed: {str(e)}"]

def get_row_counts(engine, tables):
    """Return {table: row_count}. Identifiers are double-quoted so
    mixed-case table names returned by the inspector resolve correctly."""
    counts = {}
    with engine.connect() as conn:
        for table in tables:
            result = conn.execute(text(f'SELECT COUNT(*) FROM "{table}"'))
            counts[table] = result.fetchone()[0]
    return counts

def compare_data_counts(dev_engine, prod_engine, common_tables):
    """Print a side-by-side row-count table for the common tables."""
    print("\n" + "="*80)
    print("DATA ROW COUNTS")
    print("="*80)

    print("\nGetting DEV row counts...")
    dev_counts = get_row_counts(dev_engine, common_tables)

    print("Getting PROD row counts...")
    prod_counts = get_row_counts(prod_engine, common_tables)

    print(f"\n{'Table':<30} {'DEV':<15} {'PROD':<15} {'Diff':<15}")
    print("-" * 75)

    for table in sorted(common_tables):
        dev_count = dev_counts[table]
        prod_count = prod_counts[table]
        diff = dev_count - prod_count
        diff_str = f"+{diff}" if diff > 0 else str(diff)

        status = "⚠️ " if abs(diff) > 0 else "✅"
        print(f"{status} {table:<28} {dev_count:<15} {prod_count:<15} {diff_str:<15}")

def main():
    print("\n" + "="*80)
    print("DATABASE INTEGRITY CHECKER")
    print("="*80)
    # Only the host/db part is printed so credentials never reach logs.
    print(f"\nDEV: {DEV_DB.split('@')[1]}")
    print(f"PROD: {PROD_DB.split('@')[1]}")

    try:
        print("\n🔌 Connecting to databases...")
        dev_engine = create_engine(DEV_DB)
        prod_engine = create_engine(PROD_DB)

        with dev_engine.connect() as conn:
            conn.execute(text("SELECT 1"))
        print("✅ Connected to DEV database")

        with prod_engine.connect() as conn:
            conn.execute(text("SELECT 1"))
        print("✅ Connected to PROD database")

        print("\n📊 Gathering database information...")
        dev_info = get_db_info(dev_engine, "DEV")
        prod_info = get_db_info(prod_engine, "PROD")

        all_issues = []

        common_tables = compare_tables(dev_info, prod_info)

        all_issues.extend(compare_columns(dev_info, prod_info, common_tables))
        all_issues.extend(compare_enums(dev_info, prod_info))
        all_issues.extend(check_migration_history(dev_engine, prod_engine))

        compare_data_counts(dev_engine, prod_engine, common_tables)

        print("\n" + "="*80)
        print("SUMMARY")
        print("="*80)

        if all_issues:
            print(f"\n❌ Found {len(all_issues)} integrity issues:")
            for i, issue in enumerate(all_issues, 1):
                print(f"  {i}. {issue}")
            print("\n⚠️  Databases are NOT in sync!")
            sys.exit(1)
        else:
            print("\n✅ Databases are in sync!")
            print("✅ No integrity issues found")
            sys.exit(0)

    except Exception as e:
        print(f"\n❌ Error: {str(e)}")
        import traceback
        traceback.print_exc()
        sys.exit(1)

if __name__ == "__main__":
    main()
doesn't exist in database: {e}") + mismatches.append(f"{table_name}: Table missing in database") + continue + + # Get columns from model + model_columns = {col.name for col in table.columns} + + # Find missing columns + missing_in_db = model_columns - db_columns + extra_in_db = db_columns - model_columns + + if missing_in_db: + print(f" ⚠️ Missing in DATABASE: {missing_in_db}") + mismatches.append(f"{table_name}: Missing in DB: {missing_in_db}") + + if extra_in_db: + print(f" ℹ️ Extra in DATABASE (not in model): {extra_in_db}") + + if not missing_in_db and not extra_in_db: + print(f" ✅ Schema matches!") + +print("\n" + "=" * 80) +if mismatches: + print(f"❌ FOUND {len(mismatches)} MISMATCHES:") + for mismatch in mismatches: + print(f" - {mismatch}") +else: + print("✅ ALL SCHEMAS MATCH!") +print("=" * 80) diff --git a/create_superadmin.py b/create_superadmin.py new file mode 100644 index 0000000..7644bc9 --- /dev/null +++ b/create_superadmin.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python3 +""" +Create Superadmin User Script +Generates a superadmin user with hashed password for LOAF membership platform +""" + +import bcrypt +import sys +import os +from getpass import getpass + +def generate_password_hash(password: str) -> str: + """Generate bcrypt hash for password""" + return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode() + +def generate_sql(email: str, password_hash: str, first_name: str, last_name: str) -> str: + """Generate SQL INSERT statement""" + return f""" +-- Create Superadmin User +INSERT INTO users ( + id, email, password_hash, first_name, last_name, + status, role, email_verified, created_at, updated_at +) VALUES ( + gen_random_uuid(), + '{email}', + '{password_hash}', + '{first_name}', + '{last_name}', + 'active', + 'superadmin', + true, + NOW(), + NOW() +); +""" + +def main(): + print("=" * 70) + print("LOAF Membership Platform - Superadmin User Creator") + print("=" * 70) + print() + + # Get user input + email = input("Email address: 
").strip() + if not email or '@' not in email: + print("❌ Invalid email address") + sys.exit(1) + + first_name = input("First name: ").strip() + if not first_name: + print("❌ First name is required") + sys.exit(1) + + last_name = input("Last name: ").strip() + if not last_name: + print("❌ Last name is required") + sys.exit(1) + + # Get password securely + password = getpass("Password: ") + if len(password) < 8: + print("❌ Password must be at least 8 characters") + sys.exit(1) + + password_confirm = getpass("Confirm password: ") + if password != password_confirm: + print("❌ Passwords do not match") + sys.exit(1) + + print() + print("Generating password hash...") + password_hash = generate_password_hash(password) + + print("✅ Password hash generated") + print() + print("=" * 70) + print("SQL STATEMENT") + print("=" * 70) + + sql = generate_sql(email, password_hash, first_name, last_name) + print(sql) + + # Save to file + output_file = "create_superadmin.sql" + with open(output_file, 'w') as f: + f.write(sql) + + print("=" * 70) + print(f"✅ SQL saved to: {output_file}") + print() + print("Run this command to create the user:") + print(f" psql -U postgres -d loaf_new -f {output_file}") + print() + print("Or copy the SQL above and run it directly in psql") + print("=" * 70) + +if __name__ == "__main__": + try: + main() + except KeyboardInterrupt: + print("\n\n❌ Cancelled by user") + sys.exit(1) + except Exception as e: + print(f"\n❌ Error: {e}") + sys.exit(1) diff --git a/fix_all_schema_mismatches.sh b/fix_all_schema_mismatches.sh new file mode 100644 index 0000000..8c6c864 --- /dev/null +++ b/fix_all_schema_mismatches.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# Fix all schema mismatches between models.py and database +# Run this on your server + +set -e # Exit on error + +echo "============================================================" +echo "Schema Mismatch Fix Script" +echo "============================================================" +echo "" + +# Navigate to backend 
#!/bin/bash
# Fix all schema mismatches between models.py and database
# Run this on your server

set -e  # Exit on error

echo "============================================================"
echo "Schema Mismatch Fix Script"
echo "============================================================"
echo ""

# Run from the directory this script lives in, so alembic.ini is found.
cd "$(dirname "$0")"

echo "Step 1: Check current Alembic status..."
python3 -m alembic current

echo ""
# NOTE: 'upgrade head' applies EVERY pending migration, not just one —
# the message reflects that instead of naming a single revision.
echo "Step 2: Apply all pending migrations (alembic upgrade head)..."
python3 -m alembic upgrade head

echo ""
echo "Step 3: Verify migration was applied..."
python3 -m alembic current

echo ""
echo "Step 4: Restart PM2 backend..."
pm2 restart membership-backend

echo ""
echo "============================================================"
echo "✅ Schema fixes applied!"
echo "============================================================"
echo ""
echo "All pending Alembic migrations were applied (see 'alembic history')."
echo ""
echo "Please test:"
echo "  1. Login to admin dashboard"
echo "  2. Navigate to user invitations page"
echo "  3. Verify no more schema errors"
echo ""
-- ============================================================================
-- Fix All Permission Codes to Match Backend Code
-- This migration adds all missing permissions that the code actually checks for
-- ============================================================================

BEGIN;

-- ============================================================================
-- Delete old incorrect permissions and role mappings
-- WARNING: this is intentionally destructive — it wipes ALL existing
-- role/permission rows (including any custom ones) before rebuilding.
-- ============================================================================

DELETE FROM role_permissions;
DELETE FROM permissions;

-- ============================================================================
-- Create ALL 36 permissions that backend code actually checks for.
-- ON CONFLICT (code) relies on the unique constraint on permissions.code.
-- ============================================================================

INSERT INTO permissions (id, code, name, description, module, created_at)
VALUES
    -- Users Permissions (9)
    (gen_random_uuid(), 'users.view', 'View Users', 'View user list and profiles', 'users', NOW()),
    (gen_random_uuid(), 'users.create', 'Create Users', 'Create new users', 'users', NOW()),
    (gen_random_uuid(), 'users.edit', 'Edit Users', 'Edit user information', 'users', NOW()),
    (gen_random_uuid(), 'users.approve', 'Approve Users', 'Approve pending memberships', 'users', NOW()),
    (gen_random_uuid(), 'users.import', 'Import Users', 'Import users from CSV', 'users', NOW()),
    (gen_random_uuid(), 'users.export', 'Export Users', 'Export users to CSV', 'users', NOW()),
    (gen_random_uuid(), 'users.status', 'Change User Status', 'Update user status', 'users', NOW()),
    (gen_random_uuid(), 'users.reset_password', 'Reset User Password', 'Reset user passwords', 'users', NOW()),
    (gen_random_uuid(), 'users.resend_verification', 'Resend Verification', 'Resend email verification', 'users', NOW()),

    -- Events Permissions (6)
    (gen_random_uuid(), 'events.view', 'View Events', 'View event list', 'events', NOW()),
    (gen_random_uuid(), 'events.create', 'Create Events', 'Create new events', 'events', NOW()),
    (gen_random_uuid(), 'events.edit', 'Edit Events', 'Edit event information', 'events', NOW()),
    (gen_random_uuid(), 'events.delete', 'Delete Events', 'Delete events', 'events', NOW()),
    (gen_random_uuid(), 'events.rsvps', 'View RSVPs', 'View event RSVPs', 'events', NOW()),
    (gen_random_uuid(), 'events.attendance', 'Manage Attendance', 'Mark attendance', 'events', NOW()),

    -- Gallery Permissions (3)
    (gen_random_uuid(), 'gallery.upload', 'Upload Photos', 'Upload event photos', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.edit', 'Edit Gallery', 'Edit photo captions', 'gallery', NOW()),
    (gen_random_uuid(), 'gallery.delete', 'Delete Photos', 'Delete event photos', 'gallery', NOW()),

    -- Subscriptions Permissions (6)
    (gen_random_uuid(), 'subscriptions.view', 'View Subscriptions', 'View user subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.plans', 'Manage Plans', 'Manage subscription plans', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.edit', 'Edit Subscriptions', 'Edit user subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.cancel', 'Cancel Subscriptions', 'Cancel subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.activate', 'Activate Subscriptions', 'Manually activate subscriptions', 'subscriptions', NOW()),
    (gen_random_uuid(), 'subscriptions.export', 'Export Subscriptions', 'Export subscription data', 'subscriptions', NOW()),

    -- Donations Permissions (2)
    (gen_random_uuid(), 'donations.view', 'View Donations', 'View donation records', 'donations', NOW()),
    (gen_random_uuid(), 'donations.export', 'Export Donations', 'Export donation data', 'donations', NOW()),

    -- Financials Permissions (3) (Financial Reports)
    (gen_random_uuid(), 'financials.create', 'Create Financial Reports', 'Upload financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.edit', 'Edit Financial Reports', 'Edit financial reports', 'financials', NOW()),
    (gen_random_uuid(), 'financials.delete', 'Delete Financial Reports', 'Delete financial reports', 'financials', NOW()),

    -- Newsletters Permissions (3)
    (gen_random_uuid(), 'newsletters.create', 'Create Newsletters', 'Upload newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.edit', 'Edit Newsletters', 'Edit newsletter archives', 'newsletters', NOW()),
    (gen_random_uuid(), 'newsletters.delete', 'Delete Newsletters', 'Delete newsletter archives', 'newsletters', NOW()),

    -- Bylaws Permissions (3)
    (gen_random_uuid(), 'bylaws.create', 'Create Bylaws', 'Upload bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.edit', 'Edit Bylaws', 'Edit bylaws documents', 'bylaws', NOW()),
    (gen_random_uuid(), 'bylaws.delete', 'Delete Bylaws', 'Delete bylaws documents', 'bylaws', NOW()),

    -- Settings Permissions (1)
    (gen_random_uuid(), 'settings.storage', 'View Storage Usage', 'View storage usage statistics', 'settings', NOW())
ON CONFLICT (code) DO NOTHING;

-- ============================================================================
-- Assign Permissions to Roles
-- ============================================================================

-- Guest Role: No permissions
-- (Members can only view their own data through different endpoints)

-- Member Role: Basic viewing only
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'member',
    (SELECT id FROM roles WHERE code = 'member'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    'events.view'
)
ON CONFLICT DO NOTHING;

-- Admin Role: Full management except financial
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'admin',
    (SELECT id FROM roles WHERE code = 'admin'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    -- User Management
    'users.view', 'users.create', 'users.edit', 'users.approve', 'users.import',
    'users.export', 'users.status', 'users.reset_password', 'users.resend_verification',

    -- Event Management
    'events.view', 'events.create', 'events.edit', 'events.delete', 'events.rsvps', 'events.attendance',

    -- Gallery
    'gallery.upload', 'gallery.edit', 'gallery.delete',

    -- Content
    'newsletters.create', 'newsletters.edit', 'newsletters.delete',
    'bylaws.create', 'bylaws.edit', 'bylaws.delete',

    -- Settings
    'settings.storage'
)
ON CONFLICT DO NOTHING;

-- Finance Role: Financial permissions + basic viewing
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'finance',
    (SELECT id FROM roles WHERE code = 'finance'),
    p.id,
    NOW()
FROM permissions p
WHERE p.code IN (
    -- Subscriptions & Donations
    'subscriptions.view', 'subscriptions.plans', 'subscriptions.edit',
    'subscriptions.cancel', 'subscriptions.activate', 'subscriptions.export',
    'donations.view', 'donations.export',

    -- Financial Reports
    'financials.create', 'financials.edit', 'financials.delete',

    -- Basic Access
    'users.view',
    'events.view'
)
ON CONFLICT DO NOTHING;

-- Superadmin Role: ALL permissions
INSERT INTO role_permissions (id, role, role_id, permission_id, created_at)
SELECT
    gen_random_uuid(),
    'superadmin',
    (SELECT id FROM roles WHERE code = 'superadmin'),
    p.id,
    NOW()
FROM permissions p
ON CONFLICT DO NOTHING;

COMMIT;

\echo '✅ All permissions fixed!'
\echo ''
-- Counts corrected to match the rows actually inserted above (36 total).
\echo 'Permission counts by role:'
\echo '  - Guest: 0'
\echo '  - Member: 1'
\echo '  - Admin: 25'
\echo '  - Finance: 13'
\echo '  - Superadmin: ALL (36 total)'
\echo ''
\echo 'Next: Restart backend with: pm2 restart membership-backend'
'users.reset_password', 'Reset User Password', 'Reset user passwords', 'users', NOW()), + (gen_random_uuid(), 'users.resend_verification', 'Resend Verification', 'Resend email verification', 'users', NOW()), + (gen_random_uuid(), 'users.invite', 'Invite Users', 'Send user invitations', 'users', NOW()), + + -- Events Permissions (8) + (gen_random_uuid(), 'events.view', 'View Events', 'View event list', 'events', NOW()), + (gen_random_uuid(), 'events.create', 'Create Events', 'Create new events', 'events', NOW()), + (gen_random_uuid(), 'events.edit', 'Edit Events', 'Edit event information', 'events', NOW()), + (gen_random_uuid(), 'events.delete', 'Delete Events', 'Delete events', 'events', NOW()), + (gen_random_uuid(), 'events.publish', 'Publish Events', 'Publish/unpublish events', 'events', NOW()), + (gen_random_uuid(), 'events.attendance', 'Manage Attendance', 'Mark attendance', 'events', NOW()), + (gen_random_uuid(), 'events.rsvps', 'View RSVPs', 'View event RSVPs', 'events', NOW()), + (gen_random_uuid(), 'events.calendar_export', 'Export Calendar', 'Export events to calendar', 'events', NOW()), + + -- Subscriptions Permissions (7) + (gen_random_uuid(), 'subscriptions.view', 'View Subscriptions', 'View user subscriptions', 'subscriptions', NOW()), + (gen_random_uuid(), 'subscriptions.create', 'Create Subscriptions', 'Create new subscriptions', 'subscriptions', NOW()), + (gen_random_uuid(), 'subscriptions.edit', 'Edit Subscriptions', 'Edit user subscriptions', 'subscriptions', NOW()), + (gen_random_uuid(), 'subscriptions.cancel', 'Cancel Subscriptions', 'Cancel subscriptions', 'subscriptions', NOW()), + (gen_random_uuid(), 'subscriptions.activate', 'Activate Subscriptions', 'Manually activate subscriptions', 'subscriptions', NOW()), + (gen_random_uuid(), 'subscriptions.plans', 'Manage Plans', 'Manage subscription plans', 'subscriptions', NOW()), + (gen_random_uuid(), 'subscriptions.export', 'Export Subscriptions', 'Export subscription data', 'subscriptions', 
NOW()), + + -- Donations Permissions (2) + (gen_random_uuid(), 'donations.view', 'View Donations', 'View donation records', 'donations', NOW()), + (gen_random_uuid(), 'donations.export', 'Export Donations', 'Export donation data', 'donations', NOW()), + + -- Financials Permissions (6) + (gen_random_uuid(), 'financials.view', 'View Financial Reports', 'View financial reports', 'financials', NOW()), + (gen_random_uuid(), 'financials.create', 'Create Financial Reports', 'Upload financial reports', 'financials', NOW()), + (gen_random_uuid(), 'financials.edit', 'Edit Financial Reports', 'Edit financial reports', 'financials', NOW()), + (gen_random_uuid(), 'financials.delete', 'Delete Financial Reports', 'Delete financial reports', 'financials', NOW()), + (gen_random_uuid(), 'financials.export', 'Export Financial Data', 'Export financial data', 'financials', NOW()), + (gen_random_uuid(), 'financials.payments', 'Manage Payments', 'Process manual payments', 'financials', NOW()), + + -- Newsletters Permissions (6) + (gen_random_uuid(), 'newsletters.view', 'View Newsletters', 'View newsletter archives', 'newsletters', NOW()), + (gen_random_uuid(), 'newsletters.create', 'Create Newsletters', 'Upload newsletter archives', 'newsletters', NOW()), + (gen_random_uuid(), 'newsletters.edit', 'Edit Newsletters', 'Edit newsletter archives', 'newsletters', NOW()), + (gen_random_uuid(), 'newsletters.delete', 'Delete Newsletters', 'Delete newsletter archives', 'newsletters', NOW()), + (gen_random_uuid(), 'newsletters.send', 'Send Newsletters', 'Send newsletters to subscribers', 'newsletters', NOW()), + (gen_random_uuid(), 'newsletters.subscribers', 'Manage Subscribers', 'Manage newsletter subscribers', 'newsletters', NOW()), + + -- Bylaws Permissions (5) + (gen_random_uuid(), 'bylaws.view', 'View Bylaws', 'View bylaws documents', 'bylaws', NOW()), + (gen_random_uuid(), 'bylaws.create', 'Create Bylaws', 'Upload bylaws documents', 'bylaws', NOW()), + (gen_random_uuid(), 'bylaws.edit', 
'Edit Bylaws', 'Edit bylaws documents', 'bylaws', NOW()), + (gen_random_uuid(), 'bylaws.delete', 'Delete Bylaws', 'Delete bylaws documents', 'bylaws', NOW()), + (gen_random_uuid(), 'bylaws.publish', 'Publish Bylaws', 'Mark bylaws as current', 'bylaws', NOW()), + + -- Gallery Permissions (5) + (gen_random_uuid(), 'gallery.view', 'View Gallery', 'View event galleries', 'gallery', NOW()), + (gen_random_uuid(), 'gallery.upload', 'Upload Photos', 'Upload event photos', 'gallery', NOW()), + (gen_random_uuid(), 'gallery.edit', 'Edit Gallery', 'Edit photo captions', 'gallery', NOW()), + (gen_random_uuid(), 'gallery.delete', 'Delete Photos', 'Delete event photos', 'gallery', NOW()), + (gen_random_uuid(), 'gallery.moderate', 'Moderate Gallery', 'Approve/reject gallery submissions', 'gallery', NOW()), + + -- Settings Permissions (6) + (gen_random_uuid(), 'settings.view', 'View Settings', 'View system settings', 'settings', NOW()), + (gen_random_uuid(), 'settings.edit', 'Edit Settings', 'Edit system settings', 'settings', NOW()), + (gen_random_uuid(), 'settings.email_templates', 'Manage Email Templates', 'Edit email templates', 'settings', NOW()), + (gen_random_uuid(), 'settings.storage', 'View Storage Usage', 'View storage usage statistics', 'settings', NOW()), + (gen_random_uuid(), 'settings.backup', 'Backup System', 'Create system backups', 'settings', NOW()), + (gen_random_uuid(), 'settings.logs', 'View Logs', 'View system logs', 'settings', NOW()), + + -- Permissions Management (4) + (gen_random_uuid(), 'permissions.view', 'View Permissions', 'View permission list', 'permissions', NOW()), + (gen_random_uuid(), 'permissions.assign', 'Assign Permissions', 'Assign permissions to roles', 'permissions', NOW()), + (gen_random_uuid(), 'permissions.manage_roles', 'Manage Roles', 'Create/edit roles', 'permissions', NOW()), + (gen_random_uuid(), 'permissions.audit', 'View Audit Logs', 'View permission audit logs', 'permissions', NOW()) + +ON CONFLICT (code) DO NOTHING; + +-- 
============================================================================ +-- Assign Permissions to Roles +-- ============================================================================ + +-- Guest Role: No permissions + +-- Member Role: Basic viewing only +INSERT INTO role_permissions (id, role, role_id, permission_id, created_at) +SELECT + gen_random_uuid(), + 'member', + (SELECT id FROM roles WHERE code = 'member'), + p.id, + NOW() +FROM permissions p +WHERE p.code IN ( + 'events.view', + 'gallery.view', + 'bylaws.view', + 'newsletters.view' +) +ON CONFLICT DO NOTHING; + +-- Admin Role: Most permissions except financials and permissions management +INSERT INTO role_permissions (id, role, role_id, permission_id, created_at) +SELECT + gen_random_uuid(), + 'admin', + (SELECT id FROM roles WHERE code = 'admin'), + p.id, + NOW() +FROM permissions p +WHERE p.code IN ( + -- User Management + 'users.view', 'users.create', 'users.edit', 'users.approve', 'users.import', + 'users.export', 'users.status', 'users.reset_password', 'users.resend_verification', 'users.invite', + + -- Event Management + 'events.view', 'events.create', 'events.edit', 'events.delete', 'events.publish', + 'events.rsvps', 'events.attendance', 'events.calendar_export', + + -- Gallery + 'gallery.view', 'gallery.upload', 'gallery.edit', 'gallery.delete', 'gallery.moderate', + + -- Content + 'newsletters.view', 'newsletters.create', 'newsletters.edit', 'newsletters.delete', + 'newsletters.send', 'newsletters.subscribers', + 'bylaws.view', 'bylaws.create', 'bylaws.edit', 'bylaws.delete', 'bylaws.publish', + + -- Settings (limited) + 'settings.view', 'settings.storage', 'settings.logs' +) +ON CONFLICT DO NOTHING; + +-- Finance Role: Financial permissions + basic viewing +INSERT INTO role_permissions (id, role, role_id, permission_id, created_at) +SELECT + gen_random_uuid(), + 'finance', + (SELECT id FROM roles WHERE code = 'finance'), + p.id, + NOW() +FROM permissions p +WHERE p.code IN ( + -- 
Subscriptions & Donations + 'subscriptions.view', 'subscriptions.create', 'subscriptions.plans', 'subscriptions.edit', + 'subscriptions.cancel', 'subscriptions.activate', 'subscriptions.export', + 'donations.view', 'donations.export', + + -- Financial Reports + 'financials.view', 'financials.create', 'financials.edit', 'financials.delete', + 'financials.export', 'financials.payments', + + -- Basic Access + 'users.view', + 'events.view', + 'bylaws.view', + 'newsletters.view' +) +ON CONFLICT DO NOTHING; + +-- Superadmin Role: ALL 60 permissions +INSERT INTO role_permissions (id, role, role_id, permission_id, created_at) +SELECT + gen_random_uuid(), + 'superadmin', + (SELECT id FROM roles WHERE code = 'superadmin'), + p.id, + NOW() +FROM permissions p +ON CONFLICT DO NOTHING; + +COMMIT; + +\echo '✅ Complete permission set created!' +\echo '' +\echo 'Permission counts:' +\echo ' Total permissions: 60' +\echo ' - users: 11' +\echo ' - events: 8' +\echo ' - subscriptions: 7' +\echo ' - donations: 2' +\echo ' - financials: 6' +\echo ' - newsletters: 6' +\echo ' - bylaws: 5' +\echo ' - gallery: 5' +\echo ' - settings: 6' +\echo ' - permissions: 4' +\echo '' +\echo 'Role assignments:' +\echo ' - Guest: 0' +\echo ' - Member: 4 (view only)' +\echo ' - Admin: ~40' +\echo ' - Finance: ~20' +\echo ' - Superadmin: 60 (all)' +\echo '' +\echo 'Next: Restart backend with: pm2 restart membership-backend' diff --git a/migrations/create_tables_only.sql b/migrations/create_tables_only.sql new file mode 100644 index 0000000..8276175 --- /dev/null +++ b/migrations/create_tables_only.sql @@ -0,0 +1,394 @@ +-- ============================================================================ +-- Create Tables Only (ENUMs already exist) +-- Run this when ENUMs exist but tables don't +-- ============================================================================ + +BEGIN; + +-- ============================================================================ +-- STEP 1: Core Tables +-- 
============================================================================ + +-- Users table +CREATE TABLE IF NOT EXISTS users ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + email VARCHAR(255) UNIQUE NOT NULL, + password_hash VARCHAR(255) NOT NULL, + first_name VARCHAR(100) NOT NULL, + last_name VARCHAR(100) NOT NULL, + phone VARCHAR(20), + address TEXT, + city VARCHAR(100), + state VARCHAR(2), + zipcode VARCHAR(10), + date_of_birth DATE, + + -- Profile + profile_image_url TEXT, + bio TEXT, + interests TEXT, + + -- Partner Information + partner_first_name VARCHAR(100), + partner_last_name VARCHAR(100), + partner_is_member BOOLEAN DEFAULT FALSE, + partner_plan_to_become_member BOOLEAN DEFAULT FALSE, + + -- Referral + referred_by_member_name VARCHAR(200), + + -- Newsletter Preferences + newsletter_subscribed BOOLEAN DEFAULT TRUE, + newsletter_publish_name BOOLEAN DEFAULT FALSE, + newsletter_publish_photo BOOLEAN DEFAULT FALSE, + newsletter_publish_birthday BOOLEAN DEFAULT FALSE, + newsletter_publish_none BOOLEAN DEFAULT FALSE, + + -- Volunteer & Scholarship + volunteer_interests TEXT, + scholarship_requested BOOLEAN DEFAULT FALSE, + + -- Directory + show_in_directory BOOLEAN DEFAULT TRUE, + + -- Lead Sources (JSON array) + lead_sources JSONB DEFAULT '[]'::jsonb, + + -- Status & Role + status userstatus DEFAULT 'pending_email' NOT NULL, + role userrole DEFAULT 'guest' NOT NULL, + role_id UUID, + + -- Rejection Tracking + rejection_reason TEXT, + rejected_at TIMESTAMP WITH TIME ZONE, + rejected_by UUID REFERENCES users(id), + + -- Membership + member_since DATE, + accepts_tos BOOLEAN DEFAULT FALSE, + tos_accepted_at TIMESTAMP WITH TIME ZONE, + + -- Reminder Tracking (from migration 004) + email_verification_reminders_sent INTEGER DEFAULT 0 NOT NULL, + last_email_verification_reminder_at TIMESTAMP WITH TIME ZONE, + event_attendance_reminders_sent INTEGER DEFAULT 0 NOT NULL, + last_event_attendance_reminder_at TIMESTAMP WITH TIME ZONE, + payment_reminders_sent 
INTEGER DEFAULT 0 NOT NULL, + last_payment_reminder_at TIMESTAMP WITH TIME ZONE, + renewal_reminders_sent INTEGER DEFAULT 0 NOT NULL, + last_renewal_reminder_at TIMESTAMP WITH TIME ZONE, + + -- WordPress Import Tracking + import_source VARCHAR(50), + import_job_id UUID, + wordpress_user_id BIGINT, + wordpress_registered_date TIMESTAMP WITH TIME ZONE, + + -- Authentication + email_verified BOOLEAN DEFAULT FALSE, + email_verification_token VARCHAR(255), + email_verification_expires TIMESTAMP WITH TIME ZONE, + password_reset_token VARCHAR(255), + password_reset_expires TIMESTAMP WITH TIME ZONE, + force_password_change BOOLEAN DEFAULT FALSE, + + -- Timestamps + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Events table +CREATE TABLE IF NOT EXISTS events ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + title VARCHAR(255) NOT NULL, + description TEXT, + location VARCHAR(255), + start_at TIMESTAMP WITH TIME ZONE NOT NULL, + end_at TIMESTAMP WITH TIME ZONE NOT NULL, + capacity INTEGER, + published BOOLEAN DEFAULT FALSE, + calendar_uid VARCHAR(255) UNIQUE, + created_by UUID REFERENCES users(id), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Event RSVPs +CREATE TABLE IF NOT EXISTS event_rsvps ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + event_id UUID NOT NULL REFERENCES events(id) ON DELETE CASCADE, + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + rsvp_status rsvpstatus NOT NULL, + attended BOOLEAN DEFAULT FALSE, + attended_at TIMESTAMP WITH TIME ZONE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + UNIQUE(event_id, user_id) +); + +-- Event Gallery +CREATE TABLE IF NOT EXISTS event_galleries ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + event_id UUID NOT NULL REFERENCES events(id) ON DELETE CASCADE, + image_url TEXT NOT NULL, + caption TEXT, + 
uploaded_by UUID NOT NULL REFERENCES users(id), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Import Jobs +CREATE TABLE IF NOT EXISTS import_jobs ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + filename VARCHAR(255) NOT NULL, + file_key VARCHAR(255), + total_rows INTEGER NOT NULL, + processed_rows INTEGER DEFAULT 0, + successful_rows INTEGER DEFAULT 0, + failed_rows INTEGER DEFAULT 0, + status importjobstatus DEFAULT 'processing' NOT NULL, + errors JSONB DEFAULT '[]'::jsonb, + + -- WordPress import enhancements + field_mapping JSONB DEFAULT '{}'::jsonb, + wordpress_metadata JSONB DEFAULT '{}'::jsonb, + imported_user_ids JSONB DEFAULT '[]'::jsonb, + rollback_at TIMESTAMP WITH TIME ZONE, + rollback_by UUID REFERENCES users(id), + + imported_by UUID NOT NULL REFERENCES users(id), + started_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + completed_at TIMESTAMP WITH TIME ZONE +); + +COMMIT; + +-- ============================================================================ +-- STEP 2: Subscription & Payment Tables +-- ============================================================================ + +BEGIN; + +CREATE TABLE IF NOT EXISTS subscription_plans ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name VARCHAR(100) NOT NULL, + description TEXT, + price_cents INTEGER NOT NULL, + billing_cycle VARCHAR(20) NOT NULL, + stripe_price_id VARCHAR(255), + custom_cycle_enabled BOOLEAN DEFAULT FALSE, + minimum_price_cents INTEGER DEFAULT 0, + allow_donation BOOLEAN DEFAULT FALSE, + is_active BOOLEAN DEFAULT TRUE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE TABLE IF NOT EXISTS subscriptions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + plan_id UUID NOT NULL REFERENCES subscription_plans(id), + stripe_subscription_id VARCHAR(255), + stripe_customer_id VARCHAR(255), + base_subscription_cents INTEGER NOT NULL, + 
donation_cents INTEGER DEFAULT 0, + status subscriptionstatus DEFAULT 'active' NOT NULL, + current_period_start TIMESTAMP WITH TIME ZONE, + current_period_end TIMESTAMP WITH TIME ZONE, + cancel_at_period_end BOOLEAN DEFAULT FALSE, + canceled_at TIMESTAMP WITH TIME ZONE, + manual_payment BOOLEAN DEFAULT FALSE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE TABLE IF NOT EXISTS donations ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID REFERENCES users(id), + amount_cents INTEGER NOT NULL, + donation_type donationtype NOT NULL, + status donationstatus DEFAULT 'pending' NOT NULL, + stripe_payment_intent_id VARCHAR(255), + donor_name VARCHAR(200), + donor_email VARCHAR(255), + message TEXT, + is_anonymous BOOLEAN DEFAULT FALSE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + completed_at TIMESTAMP WITH TIME ZONE +); + +COMMIT; + +-- ============================================================================ +-- STEP 3: RBAC Tables +-- ============================================================================ + +BEGIN; + +CREATE TABLE IF NOT EXISTS permissions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + code VARCHAR(100) UNIQUE NOT NULL, + name VARCHAR(200) NOT NULL, + description TEXT, + module VARCHAR(50), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE TABLE IF NOT EXISTS roles ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + code VARCHAR(50) UNIQUE NOT NULL, + name VARCHAR(100) NOT NULL, + description TEXT, + is_system_role BOOLEAN DEFAULT FALSE, + created_by UUID REFERENCES users(id), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE TABLE IF NOT EXISTS role_permissions ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + role VARCHAR(50), + role_id UUID REFERENCES roles(id) ON DELETE CASCADE, + permission_id UUID NOT NULL REFERENCES permissions(id) ON DELETE CASCADE, + 
created_by UUID REFERENCES users(id), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE TABLE IF NOT EXISTS user_invitations ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + email VARCHAR(255) NOT NULL, + role userrole NOT NULL, + token VARCHAR(255) UNIQUE NOT NULL, + invited_by UUID NOT NULL REFERENCES users(id), + invited_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + expires_at TIMESTAMP WITH TIME ZONE NOT NULL, + accepted_by UUID REFERENCES users(id), + accepted_at TIMESTAMP WITH TIME ZONE, + status invitationstatus DEFAULT 'pending' NOT NULL +); + +COMMIT; + +-- ============================================================================ +-- STEP 4: Document Management Tables +-- ============================================================================ + +BEGIN; + +CREATE TABLE IF NOT EXISTS newsletter_archives ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + title VARCHAR(255) NOT NULL, + description TEXT, + published_date TIMESTAMP WITH TIME ZONE NOT NULL, + document_url TEXT NOT NULL, + document_type VARCHAR(50) NOT NULL, + created_by UUID NOT NULL REFERENCES users(id), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE TABLE IF NOT EXISTS financial_reports ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + year INTEGER NOT NULL, + title VARCHAR(255) NOT NULL, + document_url TEXT NOT NULL, + document_type VARCHAR(50) NOT NULL, + created_by UUID NOT NULL REFERENCES users(id), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE TABLE IF NOT EXISTS bylaws_documents ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + title VARCHAR(255) NOT NULL, + version VARCHAR(50) NOT NULL, + effective_date TIMESTAMP WITH TIME ZONE NOT NULL, + document_url TEXT NOT NULL, + document_type VARCHAR(50) NOT NULL, + is_current BOOLEAN DEFAULT FALSE, + created_by UUID NOT NULL REFERENCES users(id), + created_at TIMESTAMP 
WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +COMMIT; + +-- ============================================================================ +-- STEP 5: System Tables +-- ============================================================================ + +BEGIN; + +CREATE TABLE IF NOT EXISTS storage_usage ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + total_bytes_used BIGINT DEFAULT 0, + max_bytes_allowed BIGINT, + last_calculated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +CREATE TABLE IF NOT EXISTS import_rollback_audit ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + import_job_id UUID NOT NULL REFERENCES import_jobs(id), + rolled_back_by UUID NOT NULL REFERENCES users(id), + rolled_back_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + deleted_user_count INTEGER NOT NULL, + deleted_user_ids JSONB NOT NULL, + reason TEXT, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Initialize storage_usage with default row +INSERT INTO storage_usage (id, total_bytes_used, max_bytes_allowed) +VALUES (gen_random_uuid(), 0, 107374182400) -- 100GB limit +ON CONFLICT DO NOTHING; + +COMMIT; + +-- ============================================================================ +-- STEP 6: Create Indexes +-- ============================================================================ + +BEGIN; + +-- Users indexes +CREATE INDEX IF NOT EXISTS idx_users_email ON users(email); +CREATE INDEX IF NOT EXISTS idx_users_status ON users(status); +CREATE INDEX IF NOT EXISTS idx_users_role ON users(role); +CREATE INDEX IF NOT EXISTS idx_users_created_at ON users(created_at); + +-- Events indexes +CREATE INDEX IF NOT EXISTS idx_events_start_at ON events(start_at); +CREATE INDEX IF NOT EXISTS idx_events_published ON events(published); +CREATE INDEX IF NOT EXISTS idx_events_created_by ON events(created_by); + +-- Event RSVPs indexes +CREATE 
INDEX IF NOT EXISTS idx_event_rsvps_event_id ON event_rsvps(event_id); +CREATE INDEX IF NOT EXISTS idx_event_rsvps_user_id ON event_rsvps(user_id); +CREATE INDEX IF NOT EXISTS idx_event_rsvps_attended ON event_rsvps(attended); + +-- Subscriptions indexes +CREATE INDEX IF NOT EXISTS idx_subscriptions_user_id ON subscriptions(user_id); +CREATE INDEX IF NOT EXISTS idx_subscriptions_status ON subscriptions(status); +CREATE INDEX IF NOT EXISTS idx_subscriptions_stripe_customer_id ON subscriptions(stripe_customer_id); + +-- Permissions indexes +CREATE INDEX IF NOT EXISTS ix_permissions_code ON permissions(code); +CREATE INDEX IF NOT EXISTS ix_permissions_module ON permissions(module); + +-- Roles indexes +CREATE INDEX IF NOT EXISTS ix_roles_code ON roles(code); + +-- Role permissions indexes +CREATE INDEX IF NOT EXISTS ix_role_permissions_role ON role_permissions(role); +CREATE INDEX IF NOT EXISTS ix_role_permissions_role_id ON role_permissions(role_id); + +-- User invitations indexes +CREATE INDEX IF NOT EXISTS ix_user_invitations_email ON user_invitations(email); +CREATE INDEX IF NOT EXISTS ix_user_invitations_token ON user_invitations(token); + +COMMIT; + +\echo '✅ All tables created successfully!' +\echo 'Run: psql ... 
-c "\dt" to verify' diff --git a/migrations/diagnose_database.sql b/migrations/diagnose_database.sql new file mode 100644 index 0000000..83754a5 --- /dev/null +++ b/migrations/diagnose_database.sql @@ -0,0 +1,80 @@ +-- ============================================================================ +-- Database Diagnostic Script +-- Run this to check what exists in your database +-- ============================================================================ + +\echo '=== CHECKING ENUMS ===' +SELECT + t.typname as enum_name, + string_agg(e.enumlabel, ', ' ORDER BY e.enumsortorder) as values +FROM pg_type t +JOIN pg_enum e ON t.oid = e.enumtypid +WHERE t.typname IN ( + 'userstatus', 'userrole', 'rsvpstatus', 'subscriptionstatus', + 'donationtype', 'donationstatus', 'invitationstatus', 'importjobstatus' +) +GROUP BY t.typname +ORDER BY t.typname; + +\echo '' +\echo '=== CHECKING TABLES ===' +SELECT + schemaname, + tablename +FROM pg_tables +WHERE schemaname = 'public' +ORDER BY tablename; + +\echo '' +\echo '=== CHECKING USERS TABLE STRUCTURE ===' +SELECT + column_name, + data_type, + is_nullable, + column_default +FROM information_schema.columns +WHERE table_name = 'users' +ORDER BY ordinal_position; + +\echo '' +\echo '=== CHECKING FOR CRITICAL FIELDS ===' +\echo 'Checking if reminder tracking fields exist...' +SELECT EXISTS ( + SELECT 1 + FROM information_schema.columns + WHERE table_name = 'users' + AND column_name = 'email_verification_reminders_sent' +) as has_reminder_fields; + +\echo '' +\echo 'Checking if accepts_tos field exists (should be accepts_tos, not tos_accepted)...' +SELECT column_name +FROM information_schema.columns +WHERE table_name = 'users' +AND column_name IN ('accepts_tos', 'tos_accepted'); + +\echo '' +\echo 'Checking if WordPress import fields exist...' 
+SELECT EXISTS ( + SELECT 1 + FROM information_schema.columns + WHERE table_name = 'users' + AND column_name = 'import_source' +) as has_import_fields; + +\echo '' +\echo '=== CHECKING IMPORT_JOBS TABLE ===' +SELECT column_name +FROM information_schema.columns +WHERE table_name = 'import_jobs' +ORDER BY ordinal_position; + +\echo '' +\echo '=== SUMMARY ===' +SELECT + (SELECT COUNT(*) FROM pg_type WHERE typname IN ( + 'userstatus', 'userrole', 'rsvpstatus', 'subscriptionstatus', + 'donationtype', 'donationstatus', 'invitationstatus', 'importjobstatus' + )) as enum_count, + (SELECT COUNT(*) FROM pg_tables WHERE schemaname = 'public') as table_count, + (SELECT COUNT(*) FROM information_schema.columns WHERE table_name = 'users') as users_column_count; diff --git a/migrations/fix_missing_fields.sql b/migrations/fix_missing_fields.sql new file mode 100644 index 0000000..6f6020c --- /dev/null +++ b/migrations/fix_missing_fields.sql @@ -0,0 +1,169 @@ +-- ============================================================================ +-- Fix Missing Fields Script +-- Safely adds missing fields without recreating existing structures +-- ============================================================================ + +BEGIN; + +\echo '=== FIXING USERS TABLE ===' + +-- Fix TOS field name if needed (tos_accepted -> accepts_tos) +DO $$ +BEGIN + IF EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'users' AND column_name = 'tos_accepted' + ) AND NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'users' AND column_name = 'accepts_tos' + ) THEN + ALTER TABLE users RENAME COLUMN tos_accepted TO accepts_tos; + RAISE NOTICE 'Renamed tos_accepted to accepts_tos'; + END IF; +END $$; + +-- Add reminder tracking fields if missing +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'users' AND column_name = 'email_verification_reminders_sent' + ) THEN + ALTER TABLE users ADD COLUMN 
email_verification_reminders_sent INTEGER DEFAULT 0 NOT NULL; + ALTER TABLE users ADD COLUMN last_email_verification_reminder_at TIMESTAMP WITH TIME ZONE; + ALTER TABLE users ADD COLUMN event_attendance_reminders_sent INTEGER DEFAULT 0 NOT NULL; + ALTER TABLE users ADD COLUMN last_event_attendance_reminder_at TIMESTAMP WITH TIME ZONE; + ALTER TABLE users ADD COLUMN payment_reminders_sent INTEGER DEFAULT 0 NOT NULL; + ALTER TABLE users ADD COLUMN last_payment_reminder_at TIMESTAMP WITH TIME ZONE; + ALTER TABLE users ADD COLUMN renewal_reminders_sent INTEGER DEFAULT 0 NOT NULL; + ALTER TABLE users ADD COLUMN last_renewal_reminder_at TIMESTAMP WITH TIME ZONE; + RAISE NOTICE 'Added reminder tracking fields'; + END IF; +END $$; + +-- Add WordPress import fields if missing +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'users' AND column_name = 'import_source' + ) THEN + ALTER TABLE users ADD COLUMN import_source VARCHAR(50); + ALTER TABLE users ADD COLUMN import_job_id UUID REFERENCES import_jobs(id); + ALTER TABLE users ADD COLUMN wordpress_user_id BIGINT; + ALTER TABLE users ADD COLUMN wordpress_registered_date TIMESTAMP WITH TIME ZONE; + RAISE NOTICE 'Added WordPress import tracking fields'; + END IF; +END $$; + +\echo '=== FIXING IMPORT_JOBS TABLE ===' + +-- Add WordPress import enhancement fields if missing +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'import_jobs' AND column_name = 'field_mapping' + ) THEN + ALTER TABLE import_jobs ADD COLUMN field_mapping JSONB DEFAULT '{}'::jsonb; + ALTER TABLE import_jobs ADD COLUMN wordpress_metadata JSONB DEFAULT '{}'::jsonb; + ALTER TABLE import_jobs ADD COLUMN imported_user_ids JSONB DEFAULT '[]'::jsonb; + ALTER TABLE import_jobs ADD COLUMN rollback_at TIMESTAMP WITH TIME ZONE; + ALTER TABLE import_jobs ADD COLUMN rollback_by UUID REFERENCES users(id); + RAISE NOTICE 'Added WordPress import enhancement fields to import_jobs'; 
+ END IF; +END $$; + +-- Add validating, preview_ready, rolled_back to ImportJobStatus enum if missing +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_enum e + JOIN pg_type t ON e.enumtypid = t.oid + WHERE t.typname = 'importjobstatus' AND e.enumlabel = 'validating' + ) THEN + ALTER TYPE importjobstatus ADD VALUE IF NOT EXISTS 'validating'; + RAISE NOTICE 'Added validating to importjobstatus enum'; + END IF; + + IF NOT EXISTS ( + SELECT 1 FROM pg_enum e + JOIN pg_type t ON e.enumtypid = t.oid + WHERE t.typname = 'importjobstatus' AND e.enumlabel = 'preview_ready' + ) THEN + ALTER TYPE importjobstatus ADD VALUE IF NOT EXISTS 'preview_ready'; + RAISE NOTICE 'Added preview_ready to importjobstatus enum'; + END IF; + + IF NOT EXISTS ( + SELECT 1 FROM pg_enum e + JOIN pg_type t ON e.enumtypid = t.oid + WHERE t.typname = 'importjobstatus' AND e.enumlabel = 'rolled_back' + ) THEN + ALTER TYPE importjobstatus ADD VALUE IF NOT EXISTS 'rolled_back'; + RAISE NOTICE 'Added rolled_back to importjobstatus enum'; + END IF; +END $$; + +-- Add pending_validation, pre_validated, canceled, expired, abandoned to UserStatus enum if missing +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_enum e + JOIN pg_type t ON e.enumtypid = t.oid + WHERE t.typname = 'userstatus' AND e.enumlabel = 'pending_validation' + ) THEN + ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'pending_validation'; + RAISE NOTICE 'Added pending_validation to userstatus enum'; + END IF; + + IF NOT EXISTS ( + SELECT 1 FROM pg_enum e + JOIN pg_type t ON e.enumtypid = t.oid + WHERE t.typname = 'userstatus' AND e.enumlabel = 'pre_validated' + ) THEN + ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'pre_validated'; + RAISE NOTICE 'Added pre_validated to userstatus enum'; + END IF; + + IF NOT EXISTS ( + SELECT 1 FROM pg_enum e + JOIN pg_type t ON e.enumtypid = t.oid + WHERE t.typname = 'userstatus' AND e.enumlabel = 'canceled' + ) THEN + ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'canceled'; + RAISE NOTICE 'Added 
canceled to userstatus enum'; + END IF; + + IF NOT EXISTS ( + SELECT 1 FROM pg_enum e + JOIN pg_type t ON e.enumtypid = t.oid + WHERE t.typname = 'userstatus' AND e.enumlabel = 'expired' + ) THEN + ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'expired'; + RAISE NOTICE 'Added expired to userstatus enum'; + END IF; + + IF NOT EXISTS ( + SELECT 1 FROM pg_enum e + JOIN pg_type t ON e.enumtypid = t.oid + WHERE t.typname = 'userstatus' AND e.enumlabel = 'abandoned' + ) THEN + ALTER TYPE userstatus ADD VALUE IF NOT EXISTS 'abandoned'; + RAISE NOTICE 'Added abandoned to userstatus enum'; + END IF; +END $$; + +COMMIT; + +\echo '' +\echo '=== VERIFICATION ===' +SELECT + (SELECT COUNT(*) FROM information_schema.columns WHERE table_name = 'users') as users_columns, + (SELECT COUNT(*) FROM information_schema.columns WHERE table_name = 'import_jobs') as import_jobs_columns, + (SELECT COUNT(*) FROM pg_enum e JOIN pg_type t ON e.enumtypid = t.oid WHERE t.typname = 'userstatus') as userstatus_values, + (SELECT COUNT(*) FROM pg_enum e JOIN pg_type t ON e.enumtypid = t.oid WHERE t.typname = 'importjobstatus') as importjobstatus_values; + +\echo '' +\echo '✅ Missing fields have been added!' 
+\echo 'You can now run: alembic stamp head' diff --git a/migrations/seed_data.sql b/migrations/seed_data.sql new file mode 100644 index 0000000..af8dc05 --- /dev/null +++ b/migrations/seed_data.sql @@ -0,0 +1,238 @@ +-- ============================================================================ +-- Seed Data for LOAF Membership Platform +-- Run this after creating the database schema +-- ============================================================================ + +BEGIN; + +-- ============================================================================ +-- STEP 1: Create Default Roles +-- ============================================================================ + +INSERT INTO roles (id, code, name, description, is_system_role, created_at, updated_at) +VALUES + (gen_random_uuid(), 'guest', 'Guest', 'Default role for new registrations', true, NOW(), NOW()), + (gen_random_uuid(), 'member', 'Member', 'Active paying members with full access', true, NOW(), NOW()), + (gen_random_uuid(), 'admin', 'Admin', 'Board members with management access', true, NOW(), NOW()), + (gen_random_uuid(), 'finance', 'Finance', 'Treasurer role with financial access', true, NOW(), NOW()), + (gen_random_uuid(), 'superadmin', 'Super Admin', 'Full system access', true, NOW(), NOW()) +ON CONFLICT (code) DO NOTHING; + +-- ============================================================================ +-- STEP 2: Create Permissions +-- ============================================================================ + +INSERT INTO permissions (id, code, name, description, module, created_at) +VALUES + -- User Management Permissions + (gen_random_uuid(), 'users.view', 'View Users', 'View user list and profiles', 'users', NOW()), + (gen_random_uuid(), 'users.create', 'Create Users', 'Create new users', 'users', NOW()), + (gen_random_uuid(), 'users.edit', 'Edit Users', 'Edit user information', 'users', NOW()), + (gen_random_uuid(), 'users.delete', 'Delete Users', 'Delete users', 'users', NOW()), + 
(gen_random_uuid(), 'users.approve', 'Approve Users', 'Approve pending memberships', 'users', NOW()), + (gen_random_uuid(), 'users.import', 'Import Users', 'Import users from CSV/external sources', 'users', NOW()), + + -- Event Management Permissions + (gen_random_uuid(), 'events.view', 'View Events', 'View event list and details', 'events', NOW()), + (gen_random_uuid(), 'events.create', 'Create Events', 'Create new events', 'events', NOW()), + (gen_random_uuid(), 'events.edit', 'Edit Events', 'Edit event information', 'events', NOW()), + (gen_random_uuid(), 'events.delete', 'Delete Events', 'Delete events', 'events', NOW()), + (gen_random_uuid(), 'events.publish', 'Publish Events', 'Publish/unpublish events', 'events', NOW()), + (gen_random_uuid(), 'events.manage_attendance', 'Manage Attendance', 'Mark event attendance', 'events', NOW()), + + -- Financial Permissions + (gen_random_uuid(), 'finance.view', 'View Financial Data', 'View subscriptions and payments', 'finance', NOW()), + (gen_random_uuid(), 'finance.manage_plans', 'Manage Subscription Plans', 'Create/edit subscription plans', 'finance', NOW()), + (gen_random_uuid(), 'finance.manage_subscriptions', 'Manage Subscriptions', 'Manage user subscriptions', 'finance', NOW()), + (gen_random_uuid(), 'finance.view_reports', 'View Financial Reports', 'Access financial reports', 'finance', NOW()), + (gen_random_uuid(), 'finance.export', 'Export Financial Data', 'Export financial data', 'finance', NOW()), + + -- Content Management Permissions + (gen_random_uuid(), 'content.newsletters', 'Manage Newsletters', 'Manage newsletter archives', 'content', NOW()), + (gen_random_uuid(), 'content.documents', 'Manage Documents', 'Manage bylaws and documents', 'content', NOW()), + (gen_random_uuid(), 'content.gallery', 'Manage Gallery', 'Manage event galleries', 'content', NOW()), + + -- System Permissions + (gen_random_uuid(), 'system.settings', 'System Settings', 'Manage system settings', 'system', NOW()), + 
(gen_random_uuid(), 'system.roles', 'Manage Roles', 'Create/edit roles and permissions', 'system', NOW()), + (gen_random_uuid(), 'system.invitations', 'Manage Invitations', 'Send admin invitations', 'system', NOW()), + (gen_random_uuid(), 'system.storage', 'Manage Storage', 'View storage usage', 'system', NOW()), + (gen_random_uuid(), 'system.audit', 'View Audit Logs', 'View system audit logs', 'system', NOW()) +ON CONFLICT (code) DO NOTHING; + +-- ============================================================================ +-- STEP 3: Assign Permissions to Roles +-- ============================================================================ + +-- Guest Role: No permissions (view-only through public pages) +-- No entries needed + +-- Member Role: Limited permissions +INSERT INTO role_permissions (id, role, role_id, permission_id, created_at) +SELECT + gen_random_uuid(), + 'member', + (SELECT id FROM roles WHERE code = 'member'), + p.id, + NOW() +FROM permissions p +WHERE p.code IN ( + 'events.view' +) +ON CONFLICT DO NOTHING; + +-- Admin Role: Most permissions except financial +INSERT INTO role_permissions (id, role, role_id, permission_id, created_at) +SELECT + gen_random_uuid(), + 'admin', + (SELECT id FROM roles WHERE code = 'admin'), + p.id, + NOW() +FROM permissions p +WHERE p.code IN ( + -- User Management + 'users.view', 'users.create', 'users.edit', 'users.approve', 'users.import', + -- Event Management + 'events.view', 'events.create', 'events.edit', 'events.delete', 'events.publish', 'events.manage_attendance', + -- Content Management + 'content.newsletters', 'content.documents', 'content.gallery', + -- System (limited) + 'system.invitations', 'system.storage' +) +ON CONFLICT DO NOTHING; + +-- Finance Role: Financial permissions + basic access +INSERT INTO role_permissions (id, role, role_id, permission_id, created_at) +SELECT + gen_random_uuid(), + 'finance', + (SELECT id FROM roles WHERE code = 'finance'), + p.id, + NOW() +FROM permissions p +WHERE 
p.code IN ( + -- Financial + 'finance.view', 'finance.manage_plans', 'finance.manage_subscriptions', 'finance.view_reports', 'finance.export', + -- Basic Access + 'users.view', 'events.view' +) +ON CONFLICT DO NOTHING; + +-- Superadmin Role: All permissions +INSERT INTO role_permissions (id, role, role_id, permission_id, created_at) +SELECT + gen_random_uuid(), + 'superadmin', + (SELECT id FROM roles WHERE code = 'superadmin'), + p.id, + NOW() +FROM permissions p +ON CONFLICT DO NOTHING; + +-- ============================================================================ +-- STEP 4: Create Subscription Plans +-- ============================================================================ + +INSERT INTO subscription_plans (id, name, description, price_cents, billing_cycle, custom_cycle_enabled, minimum_price_cents, allow_donation, is_active, created_at, updated_at) +VALUES + -- Annual Individual Membership + ( + gen_random_uuid(), + 'Annual Individual Membership', + 'Standard annual membership for one person. Includes access to all LOAF events, member directory, and exclusive content.', + 6000, -- $60.00 + 'annual', + false, + 6000, + false, + true, + NOW(), + NOW() + ), + + -- Annual Group Membership + ( + gen_random_uuid(), + 'Annual Group Membership', + 'Annual membership for two people living at the same address. Both members receive full access to all LOAF benefits.', + 10000, -- $100.00 + 'annual', + false, + 10000, + false, + true, + NOW(), + NOW() + ), + + -- Pay What You Want (with minimum) + ( + gen_random_uuid(), + 'Pay What You Want Membership', + 'Choose your own annual membership amount. Minimum $30. 
Additional contributions help support our scholarship fund.', + 3000, -- $30.00 minimum + 'annual', + true, -- Allow custom amount + 3000, -- Minimum $30 + true, -- Additional amount is treated as donation + true, + NOW(), + NOW() + ) +ON CONFLICT DO NOTHING; + +-- ============================================================================ +-- STEP 5: Initialize Storage Usage (if not already done) +-- ============================================================================ + +INSERT INTO storage_usage (id, total_bytes_used, max_bytes_allowed, last_calculated_at, created_at, updated_at) +VALUES (gen_random_uuid(), 0, 107374182400, NOW(), NOW(), NOW()) -- 100GB limit +ON CONFLICT DO NOTHING; + +COMMIT; + +-- ============================================================================ +-- Success Message +-- ============================================================================ + +\echo '✅ Seed data created successfully!' +\echo '' +\echo 'Created:' +\echo ' - 5 default roles (guest, member, admin, finance, superadmin)' +\echo ' - 25 permissions across 5 modules' +\echo ' - Role-permission mappings' +\echo ' - 3 subscription plans' +\echo ' - Storage usage initialization' +\echo '' +\echo 'Next steps:' +\echo ' 1. Create superadmin user (see instructions below)' +\echo ' 2. Configure Stripe price IDs in subscription_plans' +\echo ' 3. 
Start the application' +\echo '' +\echo '━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━' +\echo 'CREATE SUPERADMIN USER:' +\echo '━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━' +\echo '' +\echo 'Generate password hash in Python:' +\echo ' python3 -c "import bcrypt; print(bcrypt.hashpw(b\"your-password\", bcrypt.gensalt()).decode())"' +\echo '' +\echo 'Then run:' +\echo ' psql -U postgres -d loaf_new' +\echo '' +\echo 'INSERT INTO users (' +\echo ' id, email, password_hash, first_name, last_name,' +\echo ' status, role, email_verified, created_at, updated_at' +\echo ') VALUES (' +\echo ' gen_random_uuid(),' +\echo ' '\''admin@loafmembers.org'\'',' +\echo ' '\''$2b$12$YOUR_BCRYPT_HASH_HERE'\'',' +\echo ' '\''Admin'\'',' +\echo ' '\''User'\'',' +\echo ' '\''active'\'',' +\echo ' '\''superadmin'\'',' +\echo ' true,' +\echo ' NOW(),' +\echo ' NOW()' +\echo ');' +\echo '' diff --git a/models.py b/models.py index a48761a..a8d8d30 100644 --- a/models.py +++ b/models.py @@ -69,6 +69,7 @@ class User(Base): role_id = Column(UUID(as_uuid=True), ForeignKey("roles.id"), nullable=True) # New dynamic role FK email_verified = Column(Boolean, default=False) email_verification_token = Column(String, nullable=True) + email_verification_expires = Column(DateTime, nullable=True) newsletter_subscribed = Column(Boolean, default=False) # Newsletter Publication Preferences (Step 2) diff --git a/requirements.txt b/requirements.txt index 13ceacf..5886c43 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,5 @@ aiosmtplib==5.0.0 +alembic==1.14.0 annotated-types==0.7.0 anyio==4.11.0 bcrypt==4.1.3 diff --git a/server.py b/server.py index a924204..beef198 100644 --- a/server.py +++ b/server.py @@ -199,12 +199,12 @@ class UserResponse(BaseModel): email: str first_name: str last_name: str - phone: str - address: str - city: str - state: str - zipcode: str - date_of_birth: datetime + phone: Optional[str] = None + address: Optional[str] = None + 
city: Optional[str] = None + state: Optional[str] = None + zipcode: Optional[str] = None + date_of_birth: Optional[datetime] = None status: str role: str email_verified: bool @@ -364,6 +364,9 @@ class AttendanceUpdate(BaseModel): user_id: str attended: bool +class BatchAttendanceUpdate(BaseModel): + updates: list[AttendanceUpdate] + class UpdateUserStatusRequest(BaseModel): status: str @@ -1499,7 +1502,14 @@ async def get_events( EventRSVP.rsvp_status == RSVPStatus.yes ).count() - # No user_rsvp_status in public endpoint + # Get current user's RSVP status for this event + user_rsvp = db.query(EventRSVP).filter( + EventRSVP.event_id == event.id, + EventRSVP.user_id == current_user.id + ).first() + + user_rsvp_status = user_rsvp.rsvp_status.value if user_rsvp else None + result.append(EventResponse( id=str(event.id), title=event.title, @@ -1512,7 +1522,7 @@ async def get_events( created_by=str(event.created_by), created_at=event.created_at, rsvp_count=rsvp_count, - user_rsvp_status=None + user_rsvp_status=user_rsvp_status )) return result @@ -1532,9 +1542,14 @@ async def get_event( EventRSVP.rsvp_status == RSVPStatus.yes ).count() - # No user_rsvp_status in public endpoint - user_rsvp = None - + # Get current user's RSVP status for this event + user_rsvp = db.query(EventRSVP).filter( + EventRSVP.event_id == event_id, + EventRSVP.user_id == current_user.id + ).first() + + user_rsvp_status = user_rsvp.rsvp_status.value if user_rsvp else None + return EventResponse( id=str(event.id), title=event.title, @@ -1547,7 +1562,7 @@ async def get_event( created_by=str(event.created_by), created_at=event.created_at, rsvp_count=rsvp_count, - user_rsvp_status=user_rsvp + user_rsvp_status=user_rsvp_status ) @api_router.post("/events/{event_id}/rsvp") @@ -1618,7 +1633,9 @@ async def get_my_event_activity( } # Separate upcoming vs past events - if event.end_at > now: + # Ensure timezone-aware comparison + event_end_at = event.end_at.replace(tzinfo=timezone.utc) if event.end_at.tzinfo 
is None else event.end_at + if event_end_at > now: upcoming_events.append(event_data) else: past_events.append(event_data) @@ -2814,8 +2831,9 @@ async def verify_invitation_token( if not invitation: raise HTTPException(status_code=404, detail="Invalid or expired invitation token") - # Check expiry - if invitation.expires_at < datetime.now(timezone.utc): + # Check expiry (handle timezone-naive datetime from DB) + expires_at_aware = invitation.expires_at.replace(tzinfo=timezone.utc) if invitation.expires_at.tzinfo is None else invitation.expires_at + if expires_at_aware < datetime.now(timezone.utc): invitation.status = InvitationStatus.expired db.commit() raise HTTPException(status_code=400, detail="Invitation has expired") @@ -2847,8 +2865,9 @@ async def accept_invitation( if not invitation: raise HTTPException(status_code=404, detail="Invalid or expired invitation token") - # Check expiry - if invitation.expires_at < datetime.now(timezone.utc): + # Check expiry (handle timezone-naive datetime from DB) + expires_at_aware = invitation.expires_at.replace(tzinfo=timezone.utc) if invitation.expires_at.tzinfo is None else invitation.expires_at + if expires_at_aware < datetime.now(timezone.utc): invitation.status = InvitationStatus.expired db.commit() raise HTTPException(status_code=400, detail="Invitation has expired") @@ -3791,9 +3810,40 @@ async def update_event( db.commit() db.refresh(event) - + return {"message": "Event updated successfully"} +@api_router.get("/admin/events/{event_id}", response_model=EventResponse) +async def get_admin_event( + event_id: str, + current_user: User = Depends(require_permission("events.view")), + db: Session = Depends(get_db) +): + """Get single event details (admin) - allows viewing unpublished events""" + event = db.query(Event).filter(Event.id == event_id).first() + if not event: + raise HTTPException(status_code=404, detail="Event not found") + + rsvp_count = db.query(EventRSVP).filter( + EventRSVP.event_id == event.id, + 
EventRSVP.rsvp_status == RSVPStatus.yes + ).count() + + return EventResponse( + id=str(event.id), + title=event.title, + description=event.description, + start_at=event.start_at, + end_at=event.end_at, + location=event.location, + capacity=event.capacity, + published=event.published, + created_by=str(event.created_by), + created_at=event.created_at, + rsvp_count=rsvp_count, + user_rsvp_status=None + ) + @api_router.get("/admin/events/{event_id}/rsvps") async def get_event_rsvps( event_id: str, @@ -3824,46 +3874,53 @@ async def get_event_rsvps( @api_router.put("/admin/events/{event_id}/attendance") async def mark_attendance( event_id: str, - request: AttendanceUpdate, + request: BatchAttendanceUpdate, current_user: User = Depends(require_permission("events.attendance")), db: Session = Depends(get_db) ): + """Mark attendance for one or more users (supports batch updates)""" event = db.query(Event).filter(Event.id == event_id).first() if not event: raise HTTPException(status_code=404, detail="Event not found") - - rsvp = db.query(EventRSVP).filter( - EventRSVP.event_id == event_id, - EventRSVP.user_id == request.user_id - ).first() - # Auto-create RSVP if it doesn't exist (for retroactive attendance marking) - if not rsvp: - rsvp = EventRSVP( - event_id=event_id, - user_id=request.user_id, - rsvp_status=RSVPStatus.yes, # Default to 'yes' for attended events - attended=False, - created_at=datetime.now(timezone.utc), - updated_at=datetime.now(timezone.utc) - ) - db.add(rsvp) - db.flush() # Get the ID without committing + updated_count = 0 + + # Process each update in the batch + for update in request.updates: + rsvp = db.query(EventRSVP).filter( + EventRSVP.event_id == event_id, + EventRSVP.user_id == update.user_id + ).first() + + # Auto-create RSVP if it doesn't exist (for retroactive attendance marking) + if not rsvp: + rsvp = EventRSVP( + event_id=event_id, + user_id=update.user_id, + rsvp_status=RSVPStatus.yes, # Default to 'yes' for attended events + 
attended=False, + created_at=datetime.now(timezone.utc), + updated_at=datetime.now(timezone.utc) + ) + db.add(rsvp) + db.flush() # Get the ID without committing + + rsvp.attended = update.attended + rsvp.attended_at = datetime.now(timezone.utc) if update.attended else None + rsvp.updated_at = datetime.now(timezone.utc) + + # If user attended and they were pending validation, update their status + if update.attended: + user = db.query(User).filter(User.id == update.user_id).first() + if user and user.status == UserStatus.pending_validation: + user.status = UserStatus.pre_validated + user.updated_at = datetime.now(timezone.utc) + + updated_count += 1 - rsvp.attended = request.attended - rsvp.attended_at = datetime.now(timezone.utc) if request.attended else None - rsvp.updated_at = datetime.now(timezone.utc) - - # If user attended and they were pending validation, update their status - if request.attended: - user = db.query(User).filter(User.id == request.user_id).first() - if user and user.status == UserStatus.pending_validation: - user.status = UserStatus.pre_validated - user.updated_at = datetime.now(timezone.utc) - db.commit() - return {"message": "Attendance marked successfully"} + return {"message": f"Attendance marked successfully for {updated_count} {'person' if updated_count == 1 else 'people'}"} @api_router.get("/admin/events") async def get_admin_events(