diff --git a/.env.example b/.env.example
index 01689db..a20ff27 100644
--- a/.env.example
+++ b/.env.example
@@ -31,3 +31,20 @@ FRONTEND_URL=http://localhost:3000
# Stripe Configuration (for future payment integration)
# STRIPE_SECRET_KEY=sk_test_...
# STRIPE_WEBHOOK_SECRET=whsec_...
+
+# Cloudflare R2 Storage Configuration
+R2_ACCOUNT_ID=your_r2_account_id
+R2_ACCESS_KEY_ID=your_r2_access_key_id
+R2_SECRET_ACCESS_KEY=your_r2_secret_access_key
+R2_BUCKET_NAME=loaf-membership-storage
+R2_PUBLIC_URL=https://your-r2-public-url.com
+
+# Storage Limits (in bytes)
+MAX_STORAGE_BYTES=10737418240 # 10GB default
+MAX_FILE_SIZE_BYTES=52428800 # 50MB per file default
+
+# Microsoft Calendar API Configuration
+MS_CALENDAR_CLIENT_ID=your_microsoft_client_id
+MS_CALENDAR_CLIENT_SECRET=your_microsoft_client_secret
+MS_CALENDAR_TENANT_ID=your_microsoft_tenant_id
+MS_CALENDAR_REDIRECT_URI=http://localhost:8000/membership/api/calendar/callback
diff --git a/__pycache__/calendar_service.cpython-312.pyc b/__pycache__/calendar_service.cpython-312.pyc
new file mode 100644
index 0000000..636a4c2
Binary files /dev/null and b/__pycache__/calendar_service.cpython-312.pyc differ
diff --git a/__pycache__/email_service.cpython-312.pyc b/__pycache__/email_service.cpython-312.pyc
index d622758..a409ff7 100644
Binary files a/__pycache__/email_service.cpython-312.pyc and b/__pycache__/email_service.cpython-312.pyc differ
diff --git a/__pycache__/models.cpython-312.pyc b/__pycache__/models.cpython-312.pyc
index fc20ace..32fb823 100644
Binary files a/__pycache__/models.cpython-312.pyc and b/__pycache__/models.cpython-312.pyc differ
diff --git a/__pycache__/ms_calendar_service.cpython-312.pyc b/__pycache__/ms_calendar_service.cpython-312.pyc
new file mode 100644
index 0000000..7b7c70d
Binary files /dev/null and b/__pycache__/ms_calendar_service.cpython-312.pyc differ
diff --git a/__pycache__/r2_storage.cpython-312.pyc b/__pycache__/r2_storage.cpython-312.pyc
new file mode 100644
index 0000000..9ffbb17
Binary files /dev/null and b/__pycache__/r2_storage.cpython-312.pyc differ
diff --git a/__pycache__/server.cpython-312.pyc b/__pycache__/server.cpython-312.pyc
index 6c451b6..73f53e8 100644
Binary files a/__pycache__/server.cpython-312.pyc and b/__pycache__/server.cpython-312.pyc differ
diff --git a/calendar_service.py b/calendar_service.py
new file mode 100644
index 0000000..3ca77f1
--- /dev/null
+++ b/calendar_service.py
@@ -0,0 +1,147 @@
+"""
+Calendar Service for generating iCalendar (.ics) data
+Implements RFC 5545 iCalendar format for universal calendar compatibility
+"""
+
+from icalendar import Calendar, Event as iCalEvent, Alarm
+from datetime import datetime, timedelta, timezone
+from zoneinfo import ZoneInfo
+import uuid
+import os
+
+
+class CalendarService:
+ """Service for generating iCalendar (.ics) data compatible with all calendar apps"""
+
+ def __init__(self):
+ self.domain = os.getenv('CALENDAR_DOMAIN', 'loaf.community')
+ self.timezone = ZoneInfo(os.getenv('CALENDAR_TIMEZONE', 'America/New_York'))
+
+ def generate_event_uid(self) -> str:
+ """
+ Generate a unique event identifier (UUID4 hex plus domain, following RFC 5545 UID conventions)
+
+ Returns:
+ str: Unique identifier in format {uuid}@{domain}
+ """
+ return f"{uuid.uuid4().hex}@{self.domain}"
+
+ def event_to_ical_event(self, event, include_reminder: bool = True):
+ """
+ Convert database Event model to iCalendar Event component
+
+ Args:
+ event: Event model instance from database
+ include_reminder: Whether to add 1-hour reminder alarm
+
+ Returns:
+ icalendar.Event: iCalendar event component
+ """
+ ical_event = iCalEvent()
+
+ # Required properties
+ ical_event.add('uid', event.calendar_uid or self.generate_event_uid())
+ ical_event.add('dtstamp', datetime.now(timezone.utc))  # DTSTAMP must be in UTC per RFC 5545
+ ical_event.add('dtstart', event.start_at)
+ ical_event.add('dtend', event.end_at)
+ ical_event.add('summary', event.title)
+
+ # Optional properties
+ if event.description:
+ ical_event.add('description', event.description)
+ if event.location:
+ ical_event.add('location', event.location)
+
+ # Metadata
+ ical_event.add('url', f"https://{self.domain}/events/{event.id}")
+ ical_event.add('status', 'CONFIRMED')
+ ical_event.add('sequence', 0)
+
+ # Add 1-hour reminder (VALARM component)
+ if include_reminder:
+ alarm = Alarm()
+ alarm.add('action', 'DISPLAY')
+ alarm.add('description', f"Reminder: {event.title}")
+ alarm.add('trigger', timedelta(hours=-1))
+ ical_event.add_component(alarm)
+
+ return ical_event
+
+ def create_calendar(self, name: str, description: str = None):
+ """
+ Create base calendar with metadata
+
+ Args:
+ name: Calendar name (X-WR-CALNAME)
+ description: Optional calendar description
+
+ Returns:
+ icalendar.Calendar: Base calendar object
+ """
+ cal = Calendar()
+ cal.add('prodid', '-//LOAF Membership Platform//EN')
+ cal.add('version', '2.0')
+ cal.add('x-wr-calname', name)
+ cal.add('x-wr-timezone', str(self.timezone))
+ if description:
+ cal.add('x-wr-caldesc', description)
+ cal.add('method', 'PUBLISH')
+ cal.add('calscale', 'GREGORIAN')
+ return cal
+
+ def create_single_event_calendar(self, event) -> bytes:
+ """
+ Create calendar with single event for download
+
+ Args:
+ event: Event model instance
+
+ Returns:
+ bytes: iCalendar data as bytes
+ """
+ cal = self.create_calendar(event.title)
+ ical_event = self.event_to_ical_event(event)
+ cal.add_component(ical_event)
+ return cal.to_ical()
+
+ def create_subscription_feed(self, events: list, feed_name: str) -> bytes:
+ """
+ Create calendar subscription feed with multiple events
+
+ Args:
+ events: List of Event model instances
+ feed_name: Name for the calendar feed
+
+ Returns:
+ bytes: iCalendar data as bytes
+ """
+ cal = self.create_calendar(
+ feed_name,
+ description="LOAF Community Events - Auto-syncing calendar feed"
+ )
+
+ for event in events:
+ ical_event = self.event_to_ical_event(event)
+ cal.add_component(ical_event)
+
+ return cal.to_ical()
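+
+
+# --- Usage sketch (not part of the service) ---
+# A minimal smoke test, assuming an Event-like object exposing the attributes the
+# service reads (id, title, description, location, start_at, end_at, calendar_uid).
+# Run directly with: python calendar_service.py
+if __name__ == "__main__":
+    from types import SimpleNamespace
+
+    svc = CalendarService()
+    demo_event = SimpleNamespace(
+        id="demo-event-id",
+        title="Monthly Potluck",
+        description="Bring a dish to share",
+        location="Community Hall",
+        start_at=datetime(2025, 6, 1, 18, 0, tzinfo=svc.timezone),
+        end_at=datetime(2025, 6, 1, 20, 0, tzinfo=svc.timezone),
+        calendar_uid=None,  # the service generates a UID when this is missing
+    )
+    print(svc.create_single_event_calendar(demo_event).decode())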
diff --git a/email_service.py b/email_service.py
index 1933a26..735c27b 100644
--- a/email_service.py
+++ b/email_service.py
@@ -135,13 +135,13 @@ async def send_verification_email(to_email: str, token: str):
@@ -156,7 +156,7 @@ async def send_verification_email(to_email: str, token: str):
Verify Email
Or copy and paste this link into your browser:
- {verification_url}
+ {verification_url}
This link will expire in 24 hours.
If you didn't create an account, please ignore this email.
@@ -175,12 +175,13 @@ async def send_approval_notification(to_email: str, first_name: str):
@@ -211,17 +212,17 @@ async def send_payment_prompt_email(to_email: str, first_name: str):
@@ -250,7 +251,7 @@ async def send_payment_prompt_email(to_email: str, first_name: str):
We're excited to have you join the LOAF community!
-
+
Questions? Contact us at support@loaf.org
@@ -269,14 +270,14 @@ async def send_password_reset_email(to_email: str, first_name: str, reset_url: s
@@ -297,7 +298,7 @@ async def send_password_reset_email(to_email: str, first_name: str, reset_url: s
If you didn't request this, please ignore this email.
-
+
Or copy and paste this link into your browser:
{reset_url}
@@ -320,8 +321,8 @@ async def send_admin_password_reset_email(
force_change_text = (
"""
-
-
⚠️ You will be required to change this password when you log in.
+
+
⚠️ You will be required to change this password when you log in.
"""
) if force_change else ""
@@ -333,15 +334,15 @@ async def send_admin_password_reset_email(
@@ -365,7 +366,7 @@ async def send_admin_password_reset_email(
Go to Login
-
+
Questions? Contact us at support@loaf.org
diff --git a/migrations/README.md b/migrations/README.md
new file mode 100644
index 0000000..4670569
--- /dev/null
+++ b/migrations/README.md
@@ -0,0 +1,144 @@
+# Database Migrations
+
+This folder contains SQL migration scripts for the membership platform.
+
+## Running the Sprint 1-3 Migration
+
+The `sprint_1_2_3_migration.sql` file adds all necessary columns and tables for the Members Only features (Sprints 1, 2, and 3).
+
+### Prerequisites
+
+- PostgreSQL installed and running
+- Database created (e.g., `membership_db`)
+- Database connection credentials from your `.env` file
+
+### Option 1: Using psql command line
+
+```bash
+# Navigate to the migrations directory
+cd /path/to/membership-website/backend/migrations
+
+# Run the migration (replace with your database credentials)
+psql -U your_username -d membership_db -f sprint_1_2_3_migration.sql
+
+# Or if you have a connection string
+psql "postgresql://user:password@localhost:5432/membership_db" -f sprint_1_2_3_migration.sql
+```
+
+### Option 2: Using pgAdmin or another GUI tool
+
+1. Open pgAdmin and connect to your database
+2. Open the Query Tool
+3. Load the `sprint_1_2_3_migration.sql` file
+4. Execute the script
+
+### Option 3: Using Python script
+
+```bash
+cd /path/to/membership-website/backend
+
+# Run the migration using Python
+python3 -c "
+import psycopg2
+import os
+from dotenv import load_dotenv
+
+load_dotenv()
+DATABASE_URL = os.getenv('DATABASE_URL')
+
+conn = psycopg2.connect(DATABASE_URL)
+cur = conn.cursor()
+
+with open('migrations/sprint_1_2_3_migration.sql', 'r') as f:
+ sql = f.read()
+ cur.execute(sql)
+
+conn.commit()
+cur.close()
+conn.close()
+print('Migration completed successfully!')
+"
+```
+
+### What Gets Added
+
+**Users Table:**
+- `profile_photo_url` - Stores Cloudflare R2 URL for profile photos
+- `social_media_facebook` - Facebook profile/page URL
+- `social_media_instagram` - Instagram handle or URL
+- `social_media_twitter` - Twitter/X handle or URL
+- `social_media_linkedin` - LinkedIn profile URL
+
+**Events Table:**
+- `microsoft_calendar_id` - Microsoft Calendar event ID for syncing
+- `microsoft_calendar_sync_enabled` - Boolean flag for sync status
+
+**New Tables:**
+- `event_galleries` - Stores event photos with captions
+- `newsletter_archives` - Stores newsletter documents
+- `financial_reports` - Stores annual financial reports
+- `bylaws_documents` - Stores organization bylaws
+- `storage_usage` - Tracks Cloudflare R2 storage usage
+
+### Verification
+
+After running the migration, verify it worked:
+
+```sql
+-- Check users table columns
+SELECT column_name, data_type
+FROM information_schema.columns
+WHERE table_name = 'users'
+AND column_name IN ('profile_photo_url', 'social_media_facebook');
+
+-- Check new tables exist
+SELECT table_name
+FROM information_schema.tables
+WHERE table_name IN ('event_galleries', 'storage_usage');
+
+-- Check storage_usage has initial record
+SELECT * FROM storage_usage;
+```
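+
+Alternatively, run the bundled `verify_columns.sql` script from this folder:
+
+```bash
+psql -U your_username -d membership_db -f verify_columns.sql
+```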
+
+### Troubleshooting
+
+**Error: "relation does not exist"**
+- Make sure you're connected to the correct database
+- Verify the `users` and `events` tables exist first
+
+**Error: "column already exists"**
+- This shouldn't normally occur, since the script uses `IF NOT EXISTS` clauses; if it does appear, the column is already in place and the error is safe to ignore
+
+**Error: "permission denied"**
+- Make sure your database user has ALTER TABLE privileges
+- You may need to run as a superuser or database owner
+
+### Rollback (if needed)
+
+If you need to undo the migration:
+
+```sql
+-- Remove new columns from users
+ALTER TABLE users DROP COLUMN IF EXISTS profile_photo_url;
+ALTER TABLE users DROP COLUMN IF EXISTS social_media_facebook;
+ALTER TABLE users DROP COLUMN IF EXISTS social_media_instagram;
+ALTER TABLE users DROP COLUMN IF EXISTS social_media_twitter;
+ALTER TABLE users DROP COLUMN IF EXISTS social_media_linkedin;
+
+-- Remove new columns from events
+ALTER TABLE events DROP COLUMN IF EXISTS microsoft_calendar_id;
+ALTER TABLE events DROP COLUMN IF EXISTS microsoft_calendar_sync_enabled;
+
+-- Remove new tables
+DROP TABLE IF EXISTS event_galleries;
+DROP TABLE IF EXISTS newsletter_archives;
+DROP TABLE IF EXISTS financial_reports;
+DROP TABLE IF EXISTS bylaws_documents;
+DROP TABLE IF EXISTS storage_usage;
+```
diff --git a/migrations/add_calendar_uid.sql b/migrations/add_calendar_uid.sql
new file mode 100644
index 0000000..6a90f88
--- /dev/null
+++ b/migrations/add_calendar_uid.sql
@@ -0,0 +1,38 @@
+-- Migration: Add calendar_uid to events table and remove Microsoft Calendar columns
+-- Sprint 2: Universal Calendar Export
+
+-- Add new calendar_uid column
+ALTER TABLE events ADD COLUMN IF NOT EXISTS calendar_uid VARCHAR;
+
+-- Remove old Microsoft Calendar columns (if they exist)
+ALTER TABLE events DROP COLUMN IF EXISTS microsoft_calendar_id;
+ALTER TABLE events DROP COLUMN IF EXISTS microsoft_calendar_sync_enabled;
+
+-- Verify migration
+DO $$
+BEGIN
+ -- Check if calendar_uid exists
+ IF EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'events' AND column_name = 'calendar_uid'
+ ) THEN
+ RAISE NOTICE '✅ calendar_uid column added successfully';
+ ELSE
+ RAISE NOTICE '⚠️ calendar_uid column not found';
+ END IF;
+
+ -- Check if old columns are removed
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'events' AND column_name = 'microsoft_calendar_id'
+ ) THEN
+ RAISE NOTICE '✅ microsoft_calendar_id column removed';
+ END IF;
+
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'events' AND column_name = 'microsoft_calendar_sync_enabled'
+ ) THEN
+ RAISE NOTICE '✅ microsoft_calendar_sync_enabled column removed';
+ END IF;
+END $$;
diff --git a/migrations/complete_fix.sql b/migrations/complete_fix.sql
new file mode 100644
index 0000000..625b97f
--- /dev/null
+++ b/migrations/complete_fix.sql
@@ -0,0 +1,161 @@
+-- Complete Fix for Sprint 1-3 Migration
+-- Safe to run multiple times
+
+-- ==============================================
+-- Step 1: Add columns to users table
+-- ==============================================
+
+DO $$
+BEGIN
+ -- Add profile_photo_url
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'users' AND column_name = 'profile_photo_url'
+ ) THEN
+ ALTER TABLE users ADD COLUMN profile_photo_url VARCHAR;
+ RAISE NOTICE 'Added profile_photo_url to users table';
+ ELSE
+ RAISE NOTICE 'profile_photo_url already exists in users table';
+ END IF;
+
+ -- Add social_media_facebook
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'users' AND column_name = 'social_media_facebook'
+ ) THEN
+ ALTER TABLE users ADD COLUMN social_media_facebook VARCHAR;
+ RAISE NOTICE 'Added social_media_facebook to users table';
+ ELSE
+ RAISE NOTICE 'social_media_facebook already exists in users table';
+ END IF;
+
+ -- Add social_media_instagram
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'users' AND column_name = 'social_media_instagram'
+ ) THEN
+ ALTER TABLE users ADD COLUMN social_media_instagram VARCHAR;
+ RAISE NOTICE 'Added social_media_instagram to users table';
+ ELSE
+ RAISE NOTICE 'social_media_instagram already exists in users table';
+ END IF;
+
+ -- Add social_media_twitter
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'users' AND column_name = 'social_media_twitter'
+ ) THEN
+ ALTER TABLE users ADD COLUMN social_media_twitter VARCHAR;
+ RAISE NOTICE 'Added social_media_twitter to users table';
+ ELSE
+ RAISE NOTICE 'social_media_twitter already exists in users table';
+ END IF;
+
+ -- Add social_media_linkedin
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'users' AND column_name = 'social_media_linkedin'
+ ) THEN
+ ALTER TABLE users ADD COLUMN social_media_linkedin VARCHAR;
+ RAISE NOTICE 'Added social_media_linkedin to users table';
+ ELSE
+ RAISE NOTICE 'social_media_linkedin already exists in users table';
+ END IF;
+END $$;
+
+-- ==============================================
+-- Step 2: Add columns to events table
+-- ==============================================
+
+DO $$
+BEGIN
+ -- Add microsoft_calendar_id
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'events' AND column_name = 'microsoft_calendar_id'
+ ) THEN
+ ALTER TABLE events ADD COLUMN microsoft_calendar_id VARCHAR;
+ RAISE NOTICE 'Added microsoft_calendar_id to events table';
+ ELSE
+ RAISE NOTICE 'microsoft_calendar_id already exists in events table';
+ END IF;
+
+ -- Add microsoft_calendar_sync_enabled
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'events' AND column_name = 'microsoft_calendar_sync_enabled'
+ ) THEN
+ ALTER TABLE events ADD COLUMN microsoft_calendar_sync_enabled BOOLEAN DEFAULT FALSE;
+ RAISE NOTICE 'Added microsoft_calendar_sync_enabled to events table';
+ ELSE
+ RAISE NOTICE 'microsoft_calendar_sync_enabled already exists in events table';
+ END IF;
+END $$;
+
+-- ==============================================
+-- Step 3: Fix storage_usage initialization
+-- ==============================================
+
+-- Delete any incomplete records
+DELETE FROM storage_usage WHERE id IS NULL;
+
+-- Insert initial record if table is empty
+INSERT INTO storage_usage (id, total_bytes_used, max_bytes_allowed, last_updated)
+SELECT
+ gen_random_uuid(),
+ 0,
+ 10737418240, -- 10GB default
+ CURRENT_TIMESTAMP
+WHERE NOT EXISTS (SELECT 1 FROM storage_usage);
+
+-- ==============================================
+-- Step 4: Verify everything
+-- ==============================================
+
+DO $$
+DECLARE
+ user_col_count INT;
+ event_col_count INT;
+ storage_count INT;
+BEGIN
+ -- Count users columns
+ SELECT COUNT(*) INTO user_col_count
+ FROM information_schema.columns
+ WHERE table_name = 'users'
+ AND column_name IN (
+ 'profile_photo_url',
+ 'social_media_facebook',
+ 'social_media_instagram',
+ 'social_media_twitter',
+ 'social_media_linkedin'
+ );
+
+ -- Count events columns
+ SELECT COUNT(*) INTO event_col_count
+ FROM information_schema.columns
+ WHERE table_name = 'events'
+ AND column_name IN (
+ 'microsoft_calendar_id',
+ 'microsoft_calendar_sync_enabled'
+ );
+
+ -- Count storage_usage records
+ SELECT COUNT(*) INTO storage_count FROM storage_usage;
+
+ -- Report results
+ RAISE NOTICE '';
+ RAISE NOTICE '========================================';
+ RAISE NOTICE 'Migration Verification Results:';
+ RAISE NOTICE '========================================';
+ RAISE NOTICE 'Users table: %/5 columns added', user_col_count;
+ RAISE NOTICE 'Events table: %/2 columns added', event_col_count;
+ RAISE NOTICE 'Storage usage: % record(s)', storage_count;
+ RAISE NOTICE '';
+
+ IF user_col_count = 5 AND event_col_count = 2 AND storage_count > 0 THEN
+ RAISE NOTICE '✅ Migration completed successfully!';
+ ELSE
+ RAISE NOTICE '⚠️ Migration incomplete. Please check the logs above.';
+ END IF;
+ RAISE NOTICE '========================================';
+END $$;
diff --git a/migrations/fix_storage_usage.sql b/migrations/fix_storage_usage.sql
new file mode 100644
index 0000000..5ed6467
--- /dev/null
+++ b/migrations/fix_storage_usage.sql
@@ -0,0 +1,17 @@
+-- Fix storage_usage table initialization
+-- This script safely initializes storage_usage if empty
+
+-- Delete any incomplete records first
+DELETE FROM storage_usage WHERE id IS NULL;
+
+-- Insert with explicit UUID generation if table is empty
+INSERT INTO storage_usage (id, total_bytes_used, max_bytes_allowed, last_updated)
+SELECT
+ gen_random_uuid(),
+ 0,
+ 10737418240, -- 10GB default
+ CURRENT_TIMESTAMP
+WHERE NOT EXISTS (SELECT 1 FROM storage_usage);
+
+-- Verify the record was created
+SELECT * FROM storage_usage;
diff --git a/migrations/sprint_1_2_3_migration.sql b/migrations/sprint_1_2_3_migration.sql
new file mode 100644
index 0000000..0448f70
--- /dev/null
+++ b/migrations/sprint_1_2_3_migration.sql
@@ -0,0 +1,117 @@
+-- Sprint 1, 2, 3 Database Migration
+-- This script adds all new columns and tables for Members Only features
+
+-- ==============================================
+-- Step 1: Add new columns to users table
+-- ==============================================
+
+-- Add profile photo and social media columns (Sprint 1)
+ALTER TABLE users ADD COLUMN IF NOT EXISTS profile_photo_url VARCHAR;
+ALTER TABLE users ADD COLUMN IF NOT EXISTS social_media_facebook VARCHAR;
+ALTER TABLE users ADD COLUMN IF NOT EXISTS social_media_instagram VARCHAR;
+ALTER TABLE users ADD COLUMN IF NOT EXISTS social_media_twitter VARCHAR;
+ALTER TABLE users ADD COLUMN IF NOT EXISTS social_media_linkedin VARCHAR;
+
+-- ==============================================
+-- Step 2: Add new columns to events table
+-- ==============================================
+
+-- Add Microsoft Calendar integration columns (Sprint 2)
+ALTER TABLE events ADD COLUMN IF NOT EXISTS microsoft_calendar_id VARCHAR;
+ALTER TABLE events ADD COLUMN IF NOT EXISTS microsoft_calendar_sync_enabled BOOLEAN DEFAULT FALSE;
+
+-- ==============================================
+-- Step 3: Create new tables
+-- ==============================================
+
+-- EventGallery table (Sprint 3)
+CREATE TABLE IF NOT EXISTS event_galleries (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ event_id UUID NOT NULL REFERENCES events(id) ON DELETE CASCADE,
+ image_url VARCHAR NOT NULL,
+ image_key VARCHAR NOT NULL,
+ caption TEXT,
+ uploaded_by UUID NOT NULL REFERENCES users(id),
+ file_size_bytes INTEGER NOT NULL,
+ created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
+);
+
+-- Create index for faster queries
+CREATE INDEX IF NOT EXISTS idx_event_galleries_event_id ON event_galleries(event_id);
+CREATE INDEX IF NOT EXISTS idx_event_galleries_uploaded_by ON event_galleries(uploaded_by);
+
+-- NewsletterArchive table (Sprint 4 - preparing ahead)
+CREATE TABLE IF NOT EXISTS newsletter_archives (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ title VARCHAR NOT NULL,
+ description TEXT,
+ published_date TIMESTAMP WITH TIME ZONE NOT NULL,
+ document_url VARCHAR NOT NULL,
+ document_type VARCHAR DEFAULT 'google_docs',
+ file_size_bytes INTEGER,
+ created_by UUID NOT NULL REFERENCES users(id),
+ created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE INDEX IF NOT EXISTS idx_newsletter_archives_published_date ON newsletter_archives(published_date DESC);
+CREATE INDEX IF NOT EXISTS idx_newsletter_archives_created_by ON newsletter_archives(created_by);
+
+-- FinancialReport table (Sprint 4 - preparing ahead)
+CREATE TABLE IF NOT EXISTS financial_reports (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ year INTEGER NOT NULL,
+ title VARCHAR NOT NULL,
+ document_url VARCHAR NOT NULL,
+ document_type VARCHAR DEFAULT 'google_drive',
+ file_size_bytes INTEGER,
+ created_by UUID NOT NULL REFERENCES users(id),
+ created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE INDEX IF NOT EXISTS idx_financial_reports_year ON financial_reports(year DESC);
+CREATE INDEX IF NOT EXISTS idx_financial_reports_created_by ON financial_reports(created_by);
+
+-- BylawsDocument table (Sprint 4 - preparing ahead)
+CREATE TABLE IF NOT EXISTS bylaws_documents (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ title VARCHAR NOT NULL,
+ version VARCHAR NOT NULL,
+ effective_date TIMESTAMP WITH TIME ZONE NOT NULL,
+ document_url VARCHAR NOT NULL,
+ document_type VARCHAR DEFAULT 'google_drive',
+ file_size_bytes INTEGER,
+ is_current BOOLEAN DEFAULT TRUE,
+ created_by UUID NOT NULL REFERENCES users(id),
+ created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE INDEX IF NOT EXISTS idx_bylaws_documents_is_current ON bylaws_documents(is_current);
+CREATE INDEX IF NOT EXISTS idx_bylaws_documents_created_by ON bylaws_documents(created_by);
+
+-- StorageUsage table (Sprint 1)
+CREATE TABLE IF NOT EXISTS storage_usage (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ total_bytes_used BIGINT DEFAULT 0,
+ max_bytes_allowed BIGINT NOT NULL,
+ last_updated TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
+);
+
+-- Insert initial storage usage record
+INSERT INTO storage_usage (total_bytes_used, max_bytes_allowed)
+SELECT 0, 10737418240
+WHERE NOT EXISTS (SELECT 1 FROM storage_usage);
+
+-- ==============================================
+-- Migration Complete
+-- ==============================================
+
+-- Verify migrations
+DO $$
+BEGIN
+ RAISE NOTICE 'Migration completed successfully!';
+ RAISE NOTICE 'New columns added to users table: profile_photo_url, social_media_*';
+ RAISE NOTICE 'New columns added to events table: microsoft_calendar_*';
+ RAISE NOTICE 'New tables created: event_galleries, newsletter_archives, financial_reports, bylaws_documents, storage_usage';
+END $$;
diff --git a/migrations/verify_columns.sql b/migrations/verify_columns.sql
new file mode 100644
index 0000000..0ad033e
--- /dev/null
+++ b/migrations/verify_columns.sql
@@ -0,0 +1,54 @@
+-- Verification script to check which columns exist
+-- Run this to see what's missing
+
+-- Check users table columns
+SELECT
+ 'users' as table_name,
+ column_name,
+ data_type,
+ is_nullable
+FROM information_schema.columns
+WHERE table_name = 'users'
+AND column_name IN (
+ 'profile_photo_url',
+ 'social_media_facebook',
+ 'social_media_instagram',
+ 'social_media_twitter',
+ 'social_media_linkedin'
+)
+ORDER BY column_name;
+
+-- Check events table columns
+SELECT
+ 'events' as table_name,
+ column_name,
+ data_type,
+ is_nullable
+FROM information_schema.columns
+WHERE table_name = 'events'
+AND column_name IN (
+ 'microsoft_calendar_id',
+ 'microsoft_calendar_sync_enabled'
+)
+ORDER BY column_name;
+
+-- Check which tables exist
+SELECT
+ table_name,
+ 'EXISTS' as status
+FROM information_schema.tables
+WHERE table_name IN (
+ 'event_galleries',
+ 'newsletter_archives',
+ 'financial_reports',
+ 'bylaws_documents',
+ 'storage_usage'
+)
+ORDER BY table_name;
+
+-- Check storage_usage contents
+SELECT
+ COUNT(*) as record_count,
+ SUM(total_bytes_used) as total_bytes,
+ MAX(max_bytes_allowed) as max_bytes
+FROM storage_usage;
diff --git a/models.py b/models.py
index 79c812f..abd7973 100644
--- a/models.py
+++ b/models.py
@@ -1,4 +1,4 @@
-from sqlalchemy import Column, String, Boolean, DateTime, Enum as SQLEnum, Text, Integer, ForeignKey, JSON
+from sqlalchemy import Column, String, Boolean, DateTime, Enum as SQLEnum, Text, Integer, BigInteger, ForeignKey, JSON
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship
from datetime import datetime, timezone
@@ -82,6 +82,13 @@ class User(Base):
password_reset_expires = Column(DateTime, nullable=True)
force_password_change = Column(Boolean, default=False, nullable=False)
+ # Members Only - Profile Photo & Social Media
+ profile_photo_url = Column(String, nullable=True) # Cloudflare R2 URL
+ social_media_facebook = Column(String, nullable=True)
+ social_media_instagram = Column(String, nullable=True)
+ social_media_twitter = Column(String, nullable=True)
+ social_media_linkedin = Column(String, nullable=True)
+
created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
@@ -92,7 +99,7 @@ class User(Base):
class Event(Base):
__tablename__ = "events"
-
+
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
title = Column(String, nullable=False)
description = Column(Text, nullable=True)
@@ -102,12 +109,17 @@ class Event(Base):
capacity = Column(Integer, nullable=True)
created_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
published = Column(Boolean, default=False)
+
+ # Members Only - Universal Calendar Export
+ calendar_uid = Column(String, nullable=True) # Unique iCalendar UID (UUID4-based)
+
created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
-
+
# Relationships
creator = relationship("User", back_populates="events_created")
rsvps = relationship("EventRSVP", back_populates="event", cascade="all, delete-orphan")
+ gallery_images = relationship("EventGallery", back_populates="event", cascade="all, delete-orphan")
class EventRSVP(Base):
__tablename__ = "event_rsvps"
@@ -167,3 +179,77 @@ class Subscription(Base):
# Relationships
user = relationship("User", back_populates="subscriptions", foreign_keys=[user_id])
plan = relationship("SubscriptionPlan", back_populates="subscriptions")
+
+class EventGallery(Base):
+ __tablename__ = "event_galleries"
+
+ id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+ event_id = Column(UUID(as_uuid=True), ForeignKey("events.id"), nullable=False)
+ image_url = Column(String, nullable=False) # Cloudflare R2 URL
+ image_key = Column(String, nullable=False) # R2 object key for deletion
+ caption = Column(Text, nullable=True)
+ uploaded_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
+ file_size_bytes = Column(Integer, nullable=False)
+ created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
+
+ # Relationships
+ event = relationship("Event", back_populates="gallery_images")
+ uploader = relationship("User")
+
+class NewsletterArchive(Base):
+ __tablename__ = "newsletter_archives"
+
+ id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+ title = Column(String, nullable=False)
+ description = Column(Text, nullable=True)
+ published_date = Column(DateTime, nullable=False)
+ document_url = Column(String, nullable=False) # Google Docs URL or R2 URL
+ document_type = Column(String, default="google_docs") # google_docs, pdf, upload
+ file_size_bytes = Column(Integer, nullable=True) # For uploaded files
+ created_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
+ created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
+ updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
+
+ # Relationships
+ creator = relationship("User")
+
+class FinancialReport(Base):
+ __tablename__ = "financial_reports"
+
+ id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+ year = Column(Integer, nullable=False)
+ title = Column(String, nullable=False) # e.g., "2024 Annual Report"
+ document_url = Column(String, nullable=False) # Google Drive URL or R2 URL
+ document_type = Column(String, default="google_drive") # google_drive, pdf, upload
+ file_size_bytes = Column(Integer, nullable=True) # For uploaded files
+ created_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
+ created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
+ updated_at = Column(DateTime, default=lambda: datetime.now(timezone.utc), onupdate=lambda: datetime.now(timezone.utc))
+
+ # Relationships
+ creator = relationship("User")
+
+class BylawsDocument(Base):
+ __tablename__ = "bylaws_documents"
+
+ id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+ title = Column(String, nullable=False)
+ version = Column(String, nullable=False) # e.g., "v1.0", "v2.0"
+ effective_date = Column(DateTime, nullable=False)
+ document_url = Column(String, nullable=False) # Google Drive URL or R2 URL
+ document_type = Column(String, default="google_drive") # google_drive, pdf, upload
+ file_size_bytes = Column(Integer, nullable=True) # For uploaded files
+ is_current = Column(Boolean, default=True) # Only one should be current
+ created_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
+ created_at = Column(DateTime, default=lambda: datetime.now(timezone.utc))
+
+ # Relationships
+ creator = relationship("User")
+
+class StorageUsage(Base):
+ __tablename__ = "storage_usage"
+
+ id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+ total_bytes_used = Column(BigInteger, default=0)
+ max_bytes_allowed = Column(BigInteger, nullable=False) # From .env
+ last_updated = Column(DateTime, default=lambda: datetime.now(timezone.utc))
diff --git a/ms_calendar_service.py b/ms_calendar_service.py
new file mode 100644
index 0000000..af54919
--- /dev/null
+++ b/ms_calendar_service.py
@@ -0,0 +1,335 @@
+"""
+Microsoft Calendar Service
+Handles OAuth2 authentication and event synchronization with Microsoft Graph API
+"""
+
+from msal import ConfidentialClientApplication
+import requests
+import os
+from datetime import datetime, timezone
+from typing import Optional, Dict, Any
+from fastapi import HTTPException
+
+
+class MSCalendarService:
+ """
+ Microsoft Calendar Service using MSAL and Microsoft Graph API
+ """
+
+ def __init__(self):
+ """Initialize MSAL client with credentials from environment"""
+ self.client_id = os.getenv('MS_CALENDAR_CLIENT_ID')
+ self.client_secret = os.getenv('MS_CALENDAR_CLIENT_SECRET')
+ self.tenant_id = os.getenv('MS_CALENDAR_TENANT_ID')
+ self.redirect_uri = os.getenv('MS_CALENDAR_REDIRECT_URI')
+
+ if not all([self.client_id, self.client_secret, self.tenant_id]):
+ raise ValueError("Microsoft Calendar credentials not properly configured in environment variables")
+
+ # Initialize MSAL Confidential Client
+ self.app = ConfidentialClientApplication(
+ client_id=self.client_id,
+ client_credential=self.client_secret,
+ authority=f"https://login.microsoftonline.com/{self.tenant_id}"
+ )
+
+ # Microsoft Graph API endpoints
+ self.graph_url = "https://graph.microsoft.com/v1.0"
+ self.scopes = ["https://graph.microsoft.com/.default"]
+
+ def get_access_token(self) -> str:
+ """
+ Get access token using client credentials flow
+
+ Returns:
+ str: Access token for Microsoft Graph API
+
+ Raises:
+ HTTPException: If token acquisition fails
+ """
+ try:
+ result = self.app.acquire_token_for_client(scopes=self.scopes)
+
+ if "access_token" in result:
+ return result["access_token"]
+ else:
+ error = result.get("error_description", "Unknown error")
+ raise HTTPException(
+ status_code=500,
+ detail=f"Failed to acquire access token: {error}"
+ )
+ except HTTPException:
+ # Re-raise our own HTTP errors untouched instead of wrapping them as 500s
+ raise
+ except Exception as e:
+ raise HTTPException(
+ status_code=500,
+ detail=f"Microsoft authentication error: {str(e)}"
+ )
+
+ def _make_graph_request(
+ self,
+ method: str,
+ endpoint: str,
+ data: Optional[Dict[Any, Any]] = None
+ ) -> Dict[Any, Any]:
+ """
+ Make an authenticated request to Microsoft Graph API
+
+ Args:
+ method: HTTP method (GET, POST, PATCH, DELETE)
+ endpoint: API endpoint path (e.g., "/me/events")
+ data: Request body data
+
+ Returns:
+ Dict: Response JSON
+
+ Raises:
+ HTTPException: If request fails
+ """
+ token = self.get_access_token()
+ headers = {
+ "Authorization": f"Bearer {token}",
+ "Content-Type": "application/json"
+ }
+
+ url = f"{self.graph_url}{endpoint}"
+
+ try:
+ if method.upper() == "GET":
+ response = requests.get(url, headers=headers)
+ elif method.upper() == "POST":
+ response = requests.post(url, headers=headers, json=data)
+ elif method.upper() == "PATCH":
+ response = requests.patch(url, headers=headers, json=data)
+ elif method.upper() == "DELETE":
+ response = requests.delete(url, headers=headers)
+ else:
+ raise ValueError(f"Unsupported HTTP method: {method}")
+
+ response.raise_for_status()
+
+ # DELETE requests may return 204 No Content
+ if response.status_code == 204:
+ return {}
+
+ return response.json()
+
+ except requests.exceptions.HTTPError as e:
+ raise HTTPException(
+ status_code=e.response.status_code,
+ detail=f"Microsoft Graph API error: {e.response.text}"
+ )
+ except Exception as e:
+ raise HTTPException(
+ status_code=500,
+ detail=f"Request to Microsoft Graph failed: {str(e)}"
+ )
+
+ async def create_event(
+ self,
+ title: str,
+ description: str,
+ location: str,
+ start_at: datetime,
+ end_at: datetime,
+ calendar_id: str = "primary"
+ ) -> str:
+ """
+ Create an event in Microsoft Calendar
+
+ Args:
+ title: Event title
+ description: Event description
+ location: Event location
+ start_at: Event start datetime (timezone-aware)
+ end_at: Event end datetime (timezone-aware)
+ calendar_id: Calendar ID (default: "primary")
+
+ Returns:
+ str: Microsoft Calendar Event ID
+
+ Raises:
+ HTTPException: If event creation fails
+ """
+ event_data = {
+ "subject": title,
+ "body": {
+ "contentType": "HTML",
+ "content": description or ""
+ },
+ "start": {
+ "dateTime": start_at.isoformat(),
+ "timeZone": "UTC"
+ },
+ "end": {
+ "dateTime": end_at.isoformat(),
+ "timeZone": "UTC"
+ },
+ "location": {
+ "displayName": location
+ }
+ }
+
+ # Use /me/events for the primary calendar or /me/calendars/{id}/events for a specific one.
+ # Note: /me only works with delegated (user) tokens; with the client-credentials flow used
+ # here, Microsoft Graph expects /users/{user-id}/events instead.
+ endpoint = "/me/events" if calendar_id == "primary" else f"/me/calendars/{calendar_id}/events"
+
+ result = self._make_graph_request("POST", endpoint, event_data)
+ return result.get("id")
+
+ async def update_event(
+ self,
+ event_id: str,
+ title: Optional[str] = None,
+ description: Optional[str] = None,
+ location: Optional[str] = None,
+ start_at: Optional[datetime] = None,
+ end_at: Optional[datetime] = None
+ ) -> bool:
+ """
+ Update an existing event in Microsoft Calendar
+
+ Args:
+ event_id: Microsoft Calendar Event ID
+ title: Updated event title (optional)
+ description: Updated description (optional)
+ location: Updated location (optional)
+ start_at: Updated start datetime (optional)
+ end_at: Updated end datetime (optional)
+
+ Returns:
+ bool: True if successful
+
+ Raises:
+ HTTPException: If update fails
+ """
+ event_data = {}
+
+ if title:
+ event_data["subject"] = title
+ if description is not None:
+ event_data["body"] = {
+ "contentType": "HTML",
+ "content": description
+ }
+ if location:
+ event_data["location"] = {"displayName": location}
+ if start_at:
+ event_data["start"] = {
+ "dateTime": start_at.isoformat(),
+ "timeZone": "UTC"
+ }
+ if end_at:
+ event_data["end"] = {
+ "dateTime": end_at.isoformat(),
+ "timeZone": "UTC"
+ }
+
+ if not event_data:
+ return True # Nothing to update
+
+ endpoint = f"/me/events/{event_id}"
+ self._make_graph_request("PATCH", endpoint, event_data)
+ return True
+
+ async def delete_event(self, event_id: str) -> bool:
+ """
+ Delete an event from Microsoft Calendar
+
+ Args:
+ event_id: Microsoft Calendar Event ID
+
+ Returns:
+ bool: True if successful
+
+ Raises:
+ HTTPException: If deletion fails
+ """
+ endpoint = f"/me/events/{event_id}"
+ self._make_graph_request("DELETE", endpoint)
+ return True
+
+ async def get_event(self, event_id: str) -> Dict[Any, Any]:
+ """
+ Get event details from Microsoft Calendar
+
+ Args:
+ event_id: Microsoft Calendar Event ID
+
+ Returns:
+ Dict: Event details
+
+ Raises:
+ HTTPException: If retrieval fails
+ """
+ endpoint = f"/me/events/{event_id}"
+ return self._make_graph_request("GET", endpoint)
+
+ async def sync_event(
+ self,
+ loaf_event,
+ existing_ms_event_id: Optional[str] = None
+ ) -> str:
+ """
+ Sync a LOAF event to Microsoft Calendar
+ Creates new event if existing_ms_event_id is None, otherwise updates
+
+ Args:
+ loaf_event: SQLAlchemy Event model instance
+ existing_ms_event_id: Existing Microsoft Calendar Event ID (optional)
+
+ Returns:
+ str: Microsoft Calendar Event ID
+
+ Raises:
+ HTTPException: If sync fails
+ """
+ if existing_ms_event_id:
+ # Update existing event
+ await self.update_event(
+ event_id=existing_ms_event_id,
+ title=loaf_event.title,
+ description=loaf_event.description,
+ location=loaf_event.location,
+ start_at=loaf_event.start_at,
+ end_at=loaf_event.end_at
+ )
+ return existing_ms_event_id
+ else:
+ # Create new event
+ return await self.create_event(
+ title=loaf_event.title,
+ description=loaf_event.description or "",
+ location=loaf_event.location,
+ start_at=loaf_event.start_at,
+ end_at=loaf_event.end_at
+ )
+
+
+# Singleton instance
+_ms_calendar = None
+
+
+def get_ms_calendar_service() -> MSCalendarService:
+ """
+ Get singleton instance of MSCalendarService
+
+ Returns:
+ MSCalendarService: Initialized Microsoft Calendar service
+ """
+ global _ms_calendar
+ if _ms_calendar is None:
+ _ms_calendar = MSCalendarService()
+ return _ms_calendar
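+
+
+# --- Usage sketch (not part of the service) ---
+# A minimal token smoke test, assuming the MS_CALENDAR_* variables are set in the
+# environment. The client-credentials flow yields an app-only token; Graph calls
+# made with it must target /users/{user-id}/... rather than /me/...
+if __name__ == "__main__":
+    service = get_ms_calendar_service()
+    token = service.get_access_token()
+    print(f"Acquired app-only Graph token ({len(token)} characters)")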
diff --git a/r2_storage.py b/r2_storage.py
new file mode 100644
index 0000000..5d8928b
--- /dev/null
+++ b/r2_storage.py
@@ -0,0 +1,259 @@
+"""
+Cloudflare R2 Storage Service
+Handles file uploads, downloads, and deletions using S3-compatible API
+"""
+
+import boto3
+from botocore.client import Config
+from botocore.exceptions import ClientError
+import os
+import uuid
+import magic
+from typing import Optional, BinaryIO
+from fastapi import UploadFile, HTTPException
+from pathlib import Path
+
+
+class R2Storage:
+ """
+ Cloudflare R2 Storage Service using S3-compatible API
+ """
+
+ # Allowed MIME types for uploads
+ ALLOWED_IMAGE_TYPES = {
+ 'image/jpeg': ['.jpg', '.jpeg'],
+ 'image/png': ['.png'],
+ 'image/webp': ['.webp'],
+ 'image/gif': ['.gif']
+ }
+
+ ALLOWED_DOCUMENT_TYPES = {
+ 'application/pdf': ['.pdf'],
+ 'application/msword': ['.doc'],
+ 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': ['.docx'],
+ 'application/vnd.ms-excel': ['.xls'],
+ 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': ['.xlsx']
+ }
+
+ def __init__(self):
+ """Initialize R2 client with credentials from environment"""
+ self.account_id = os.getenv('R2_ACCOUNT_ID')
+ self.access_key = os.getenv('R2_ACCESS_KEY_ID')
+ self.secret_key = os.getenv('R2_SECRET_ACCESS_KEY')
+ self.bucket_name = os.getenv('R2_BUCKET_NAME')
+ self.public_url = os.getenv('R2_PUBLIC_URL')
+
+ if not all([self.account_id, self.access_key, self.secret_key, self.bucket_name]):
+ raise ValueError("R2 credentials not properly configured in environment variables")
+
+ # Initialize S3 client for R2
+ self.client = boto3.client(
+ 's3',
+ endpoint_url=f'https://{self.account_id}.r2.cloudflarestorage.com',
+ aws_access_key_id=self.access_key,
+ aws_secret_access_key=self.secret_key,
+ config=Config(signature_version='s3v4'),
+ )
+
+ async def upload_file(
+ self,
+ file: UploadFile,
+ folder: str,
+ allowed_types: Optional[dict] = None,
+ max_size_bytes: Optional[int] = None
+ ) -> tuple[str, str, int]:
+ """
+ Upload a file to R2 storage
+
+ Args:
+ file: FastAPI UploadFile object
+ folder: Folder path in R2 (e.g., 'profiles', 'gallery/event-id')
+ allowed_types: Dict of allowed MIME types and extensions
+ max_size_bytes: Maximum file size in bytes
+
+ Returns:
+ tuple: (public_url, object_key, file_size_bytes)
+
+ Raises:
+ HTTPException: If upload fails or file is invalid
+ """
+ try:
+ # Read file content
+ content = await file.read()
+ file_size = len(content)
+
+ # Check file size
+ if max_size_bytes and file_size > max_size_bytes:
+ max_mb = max_size_bytes / (1024 * 1024)
+ actual_mb = file_size / (1024 * 1024)
+ raise HTTPException(
+ status_code=413,
+ detail=f"File too large: {actual_mb:.2f}MB exceeds limit of {max_mb:.2f}MB"
+ )
+
+ # Detect MIME type
+ mime = magic.from_buffer(content, mime=True)
+
+ # Validate MIME type
+ if allowed_types and mime not in allowed_types:
+ allowed_list = ', '.join(allowed_types.keys())
+ raise HTTPException(
+ status_code=400,
+ detail=f"Invalid file type: {mime}. Allowed types: {allowed_list}"
+ )
+
+ # Generate unique filename
+ file_extension = Path(file.filename).suffix.lower()
+ if not file_extension and allowed_types and mime in allowed_types:
+ file_extension = allowed_types[mime][0]
+
+ unique_filename = f"{uuid.uuid4()}{file_extension}"
+ object_key = f"{folder}/{unique_filename}"
+
+ # Upload to R2
+ self.client.put_object(
+ Bucket=self.bucket_name,
+ Key=object_key,
+ Body=content,
+ ContentType=mime,
+ ContentLength=file_size
+ )
+
+ # Generate public URL
+ public_url = self.get_public_url(object_key)
+
+ return public_url, object_key, file_size
+
+ except ClientError as e:
+ raise HTTPException(
+ status_code=500,
+ detail=f"Failed to upload file to R2: {str(e)}"
+ )
+ except HTTPException:
+ # Re-raise size/type validation errors untouched instead of wrapping them as 500s
+ raise
+ except Exception as e:
+ raise HTTPException(
+ status_code=500,
+ detail=f"Upload error: {str(e)}"
+ )
+
+ async def delete_file(self, object_key: str) -> bool:
+ """
+ Delete a file from R2 storage
+
+ Args:
+ object_key: The S3 object key (path) of the file
+
+ Returns:
+ bool: True if successful
+
+ Raises:
+ HTTPException: If deletion fails
+ """
+ try:
+ self.client.delete_object(
+ Bucket=self.bucket_name,
+ Key=object_key
+ )
+ return True
+
+ except ClientError as e:
+ raise HTTPException(
+ status_code=500,
+ detail=f"Failed to delete file from R2: {str(e)}"
+ )
+
+ def get_public_url(self, object_key: str) -> str:
+ """
+ Generate public URL for an R2 object
+
+ Args:
+ object_key: The S3 object key (path) of the file
+
+ Returns:
+ str: Public URL
+ """
+ if self.public_url:
+ # Use custom domain if configured
+ return f"{self.public_url}/{object_key}"
+ else:
+ # Fall back to the bucket's S3 endpoint. Note this endpoint requires signed
+ # requests; for anonymous public access, set R2_PUBLIC_URL to a custom domain
+ # or the bucket's r2.dev URL.
+ return f"https://{self.bucket_name}.{self.account_id}.r2.cloudflarestorage.com/{object_key}"
+
+ async def get_file_size(self, object_key: str) -> int:
+ """
+ Get the size of a file in R2
+
+ Args:
+ object_key: The S3 object key (path) of the file
+
+ Returns:
+ int: File size in bytes
+
+ Raises:
+ HTTPException: If file not found
+ """
+ try:
+ response = self.client.head_object(
+ Bucket=self.bucket_name,
+ Key=object_key
+ )
+ return response['ContentLength']
+
+ except ClientError as e:
+ if e.response['Error']['Code'] == '404':
+ raise HTTPException(status_code=404, detail="File not found")
+ raise HTTPException(
+ status_code=500,
+ detail=f"Failed to get file info: {str(e)}"
+ )
+
+ async def file_exists(self, object_key: str) -> bool:
+ """
+ Check if a file exists in R2
+
+ Args:
+ object_key: The S3 object key (path) of the file
+
+ Returns:
+ bool: True if file exists, False otherwise
+ """
+ try:
+ self.client.head_object(
+ Bucket=self.bucket_name,
+ Key=object_key
+ )
+ return True
+ except ClientError:
+ return False
+
+
+# Singleton instance
+_r2_storage = None
+
+
+def get_r2_storage() -> R2Storage:
+ """
+ Get singleton instance of R2Storage
+
+ Returns:
+ R2Storage: Initialized R2 storage service
+ """
+ global _r2_storage
+ if _r2_storage is None:
+ _r2_storage = R2Storage()
+ return _r2_storage
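+
+
+# --- Usage sketch (not part of the service) ---
+# A minimal existence check, assuming the R2_* variables are set in the environment;
+# "profiles/example.jpg" is a hypothetical object key used purely for illustration.
+if __name__ == "__main__":
+    import asyncio
+
+    storage = get_r2_storage()
+    exists = asyncio.run(storage.file_exists("profiles/example.jpg"))
+    print(f"profiles/example.jpg exists: {exists}")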
diff --git a/requirements.txt b/requirements.txt
index 64b874f..49de466 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -9,7 +9,7 @@ certifi==2025.11.12
cffi==2.0.0
charset-normalizer==3.4.4
click==8.3.1
-cryptography==46.0.3
+cryptography==44.0.0
dnspython==2.8.0
ecdsa==0.19.1
email-validator==2.3.0
@@ -17,6 +17,7 @@ fastapi==0.110.1
flake8==7.3.0
greenlet==3.2.4
h11==0.16.0
+icalendar==6.0.1
idna==3.11
iniconfig==2.3.0
isort==7.0.0
@@ -26,6 +27,7 @@ markdown-it-py==4.0.0
mccabe==0.7.0
mdurl==0.1.2
motor==3.3.1
+msal==1.27.0
mypy==1.18.2
mypy_extensions==1.1.0
numpy==2.3.5
@@ -34,6 +36,7 @@ packaging==25.0
pandas==2.3.3
passlib==1.7.4
pathspec==0.12.1
+pillow==10.2.0
platformdirs==4.5.0
pluggy==1.6.0
psycopg2-binary==2.9.11
@@ -50,6 +53,7 @@ pytest==9.0.1
python-dateutil==2.9.0.post0
python-dotenv==1.2.1
python-jose==3.5.0
+python-magic==0.4.27
python-multipart==0.0.20
pytokens==0.3.0
pytz==2025.2
diff --git a/server.py b/server.py
index d6e45a2..710300b 100644
--- a/server.py
+++ b/server.py
@@ -1,5 +1,6 @@
-from fastapi import FastAPI, APIRouter, Depends, HTTPException, status, Request
+from fastapi import FastAPI, APIRouter, Depends, HTTPException, status, Request, UploadFile, File, Form
from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import StreamingResponse
from sqlalchemy.orm import Session
from sqlalchemy import or_
from pydantic import BaseModel, EmailStr, Field, validator
@@ -14,7 +15,7 @@ import uuid
import secrets
from database import engine, get_db, Base
-from models import User, Event, EventRSVP, UserStatus, UserRole, RSVPStatus, SubscriptionPlan, Subscription, SubscriptionStatus
+from models import User, Event, EventRSVP, UserStatus, UserRole, RSVPStatus, SubscriptionPlan, Subscription, SubscriptionStatus, StorageUsage, EventGallery, NewsletterArchive, FinancialReport, BylawsDocument
from auth import (
get_password_hash,
verify_password,
@@ -33,6 +34,8 @@ from email_service import (
send_admin_password_reset_email
)
from payment_service import create_checkout_session, verify_webhook_signature, get_subscription_end_date
+from r2_storage import get_r2_storage
+from calendar_service import CalendarService
# Load environment variables
ROOT_DIR = Path(__file__).parent
@@ -59,6 +62,9 @@ app = FastAPI(
# Create a router with the /api prefix
api_router = APIRouter(prefix="/api")
+# Initialize calendar service
+calendar_service = CalendarService()
+
# Configure logging
logging.basicConfig(
level=logging.INFO,
@@ -163,6 +169,10 @@ class UserResponse(BaseModel):
role: str
email_verified: bool
created_at: datetime
+ # Subscription info (optional)
+ subscription_start_date: Optional[datetime] = None
+ subscription_end_date: Optional[datetime] = None
+ subscription_status: Optional[str] = None
model_config = {"from_attributes": True}
@@ -175,6 +185,36 @@ class UpdateProfileRequest(BaseModel):
state: Optional[str] = None
zipcode: Optional[str] = None
+class EnhancedProfileUpdateRequest(BaseModel):
+ """Members Only - Enhanced profile update with social media and directory settings"""
+ social_media_facebook: Optional[str] = None
+ social_media_instagram: Optional[str] = None
+ social_media_twitter: Optional[str] = None
+ social_media_linkedin: Optional[str] = None
+ show_in_directory: Optional[bool] = None
+ directory_email: Optional[str] = None
+ directory_bio: Optional[str] = None
+ directory_address: Optional[str] = None
+ directory_phone: Optional[str] = None
+ directory_dob: Optional[datetime] = None
+ directory_partner_name: Optional[str] = None
+
+class CalendarEventResponse(BaseModel):
+ """Calendar view response with user RSVP status"""
+ id: str
+ title: str
+ description: Optional[str]
+ start_at: datetime
+ end_at: datetime
+ location: str
+ capacity: Optional[int]
+ user_rsvp_status: Optional[str] = None
+ microsoft_calendar_synced: bool
+
+class SyncEventRequest(BaseModel):
+ """Request to sync event to Microsoft Calendar"""
+ event_id: str
+
class EventCreate(BaseModel):
title: str
description: Optional[str] = None
@@ -308,11 +348,22 @@ async def register(request: RegisterRequest, db: Session = Depends(get_db)):
@api_router.get("/auth/verify-email")
async def verify_email(token: str, db: Session = Depends(get_db)):
+ """Verify user email with token (idempotent - safe to call multiple times)"""
user = db.query(User).filter(User.email_verification_token == token).first()
-
+
if not user:
raise HTTPException(status_code=400, detail="Invalid verification token")
-
+
+ # If user is already verified, return success (idempotent behavior)
+ # This handles React Strict Mode's double-execution in development
+ if user.email_verified:
+ logger.info(f"Email already verified for user: {user.email}")
+ return {
+ "message": "Email is already verified",
+ "status": user.status.value
+ }
+
+ # Proceed with first-time verification
# Check if referred by current member - skip validation requirement
if user.referred_by_member_name:
referrer = db.query(User).filter(
@@ -329,13 +380,13 @@ async def verify_email(token: str, db: Session = Depends(get_db)):
user.status = UserStatus.pending_approval
else:
user.status = UserStatus.pending_approval
-
+
user.email_verified = True
user.email_verification_token = None
-
+
db.commit()
db.refresh(user)
-
+
logger.info(f"Email verified for user: {user.email}")
return {"message": "Email verified successfully", "status": user.status.value}
@@ -439,7 +490,13 @@ async def change_password(
return {"message": "Password changed successfully"}
@api_router.get("/auth/me", response_model=UserResponse)
-async def get_me(current_user: User = Depends(get_current_user)):
+async def get_me(current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
+ # Get user's active subscription if exists
+ active_subscription = db.query(Subscription).filter(
+ Subscription.user_id == current_user.id,
+ Subscription.status == SubscriptionStatus.active
+ ).first()
+
return UserResponse(
id=str(current_user.id),
email=current_user.email,
@@ -454,7 +511,10 @@ async def get_me(current_user: User = Depends(get_current_user)):
status=current_user.status.value,
role=current_user.role.value,
email_verified=current_user.email_verified,
- created_at=current_user.created_at
+ created_at=current_user.created_at,
+ subscription_start_date=active_subscription.start_date if active_subscription else None,
+ subscription_end_date=active_subscription.end_date if active_subscription else None,
+ subscription_status=active_subscription.status.value if active_subscription else None
)
# User Profile Routes
@@ -497,14 +557,565 @@ async def update_profile(
current_user.state = request.state
if request.zipcode:
current_user.zipcode = request.zipcode
-
+
current_user.updated_at = datetime.now(timezone.utc)
-
+
db.commit()
db.refresh(current_user)
-
+
return {"message": "Profile updated successfully"}
+# ==================== MEMBERS ONLY ROUTES ====================
+
+# Enhanced Profile Routes (Active Members Only)
+@api_router.get("/members/profile")
+async def get_enhanced_profile(
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """Get enhanced profile with all member-only fields"""
+ return {
+ "id": str(current_user.id),
+ "email": current_user.email,
+ "first_name": current_user.first_name,
+ "last_name": current_user.last_name,
+ "phone": current_user.phone,
+ "address": current_user.address,
+ "city": current_user.city,
+ "state": current_user.state,
+ "zipcode": current_user.zipcode,
+ "date_of_birth": current_user.date_of_birth,
+ "profile_photo_url": current_user.profile_photo_url,
+ "social_media_facebook": current_user.social_media_facebook,
+ "social_media_instagram": current_user.social_media_instagram,
+ "social_media_twitter": current_user.social_media_twitter,
+ "social_media_linkedin": current_user.social_media_linkedin,
+ "show_in_directory": current_user.show_in_directory,
+ "directory_email": current_user.directory_email,
+ "directory_bio": current_user.directory_bio,
+ "directory_address": current_user.directory_address,
+ "directory_phone": current_user.directory_phone,
+ "directory_dob": current_user.directory_dob,
+ "directory_partner_name": current_user.directory_partner_name,
+ "status": current_user.status.value,
+ "role": current_user.role.value
+ }
+
+@api_router.put("/members/profile")
+async def update_enhanced_profile(
+ request: EnhancedProfileUpdateRequest,
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """Update enhanced profile with social media and directory settings"""
+ if request.social_media_facebook is not None:
+ current_user.social_media_facebook = request.social_media_facebook
+ if request.social_media_instagram is not None:
+ current_user.social_media_instagram = request.social_media_instagram
+ if request.social_media_twitter is not None:
+ current_user.social_media_twitter = request.social_media_twitter
+ if request.social_media_linkedin is not None:
+ current_user.social_media_linkedin = request.social_media_linkedin
+ if request.show_in_directory is not None:
+ current_user.show_in_directory = request.show_in_directory
+ if request.directory_email is not None:
+ current_user.directory_email = request.directory_email
+ if request.directory_bio is not None:
+ current_user.directory_bio = request.directory_bio
+ if request.directory_address is not None:
+ current_user.directory_address = request.directory_address
+ if request.directory_phone is not None:
+ current_user.directory_phone = request.directory_phone
+ if request.directory_dob is not None:
+ current_user.directory_dob = request.directory_dob
+ if request.directory_partner_name is not None:
+ current_user.directory_partner_name = request.directory_partner_name
+
+ current_user.updated_at = datetime.now(timezone.utc)
+ db.commit()
+ db.refresh(current_user)
+
+ return {"message": "Enhanced profile updated successfully"}
+
+@api_router.post("/members/profile/upload-photo")
+async def upload_profile_photo(
+ file: UploadFile = File(...),
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """Upload profile photo to Cloudflare R2"""
+ r2 = get_r2_storage()
+
+ # Get storage quota
+ storage = db.query(StorageUsage).first()
+ if not storage:
+ # Initialize storage tracking
+ storage = StorageUsage(
+ total_bytes_used=0,
+ max_bytes_allowed=int(os.getenv('MAX_STORAGE_BYTES', 10737418240))
+ )
+ db.add(storage)
+ db.commit()
+ db.refresh(storage)
+
+ # Get max file size from env
+ max_file_size = int(os.getenv('MAX_FILE_SIZE_BYTES', 52428800))
+
+ # Delete old profile photo if exists
+ if current_user.profile_photo_url:
+ # Extract object key from URL
+ old_key = current_user.profile_photo_url.split('/')[-1]
+ old_key = f"profiles/{old_key}"
+ try:
+ old_size = await r2.get_file_size(old_key)
+ await r2.delete_file(old_key)
+ # Update storage usage
+ storage.total_bytes_used -= old_size
+ except Exception:
+ pass # Old photo may already be gone from R2; continue with the new upload
+
+ # Upload new photo
+ try:
+ public_url, object_key, file_size = await r2.upload_file(
+ file=file,
+ folder="profiles",
+ allowed_types=r2.ALLOWED_IMAGE_TYPES,
+ max_size_bytes=max_file_size
+ )
+
+ # Check storage quota
+ if storage.total_bytes_used + file_size > storage.max_bytes_allowed:
+ # Rollback upload
+ await r2.delete_file(object_key)
+ raise HTTPException(
+ status_code=507,
+ detail=f"Storage limit exceeded. Used: {storage.total_bytes_used / (1024**3):.2f}GB, Limit: {storage.max_bytes_allowed / (1024**3):.2f}GB"
+ )
+
+ # Update user profile
+ current_user.profile_photo_url = public_url
+ current_user.profile_photo_size = file_size # keep in sync with /admin/storage/breakdown, which sums this column
+ current_user.updated_at = datetime.now(timezone.utc)
+
+ # Update storage usage
+ storage.total_bytes_used += file_size
+ storage.last_updated = datetime.now(timezone.utc)
+
+ db.commit()
+ db.refresh(current_user)
+
+ logger.info(f"Profile photo uploaded for user {current_user.email}: {file_size} bytes")
+
+ return {
+ "message": "Profile photo uploaded successfully",
+ "profile_photo_url": public_url
+ }
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error uploading profile photo: {str(e)}")
+ raise HTTPException(status_code=500, detail=f"Upload failed: {str(e)}")
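+
+# Sketch of a client call, assuming the localhost host and /membership/api
+# prefix from .env.example (token and filename illustrative):
+# curl -X POST http://localhost:8000/membership/api/members/profile/upload-photo \
+#   -H "Authorization: Bearer <token>" \
+#   -F "file=@avatar.jpg"
+# A 507 response means the club-wide storage quota would be exceeded.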
+
+@api_router.delete("/members/profile/delete-photo")
+async def delete_profile_photo(
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """Delete profile photo from R2 and profile"""
+ if not current_user.profile_photo_url:
+ raise HTTPException(status_code=404, detail="No profile photo to delete")
+
+ r2 = get_r2_storage()
+ storage = db.query(StorageUsage).first()
+
+ # Extract object key from URL
+ object_key = current_user.profile_photo_url.split('/')[-1]
+ object_key = f"profiles/{object_key}"
+
+ try:
+ file_size = await r2.get_file_size(object_key)
+ await r2.delete_file(object_key)
+
+ # Update storage usage
+ if storage:
+ storage.total_bytes_used -= file_size
+ storage.last_updated = datetime.now(timezone.utc)
+
+ # Update user profile
+ current_user.profile_photo_url = None
+ current_user.profile_photo_size = None # keep the storage breakdown sum consistent
+ current_user.updated_at = datetime.now(timezone.utc)
+
+ db.commit()
+
+ logger.info(f"Profile photo deleted for user {current_user.email}")
+
+ return {"message": "Profile photo deleted successfully"}
+ except Exception as e:
+ logger.error(f"Error deleting profile photo: {str(e)}")
+ raise HTTPException(status_code=500, detail=f"Deletion failed: {str(e)}")
+
+# Calendar Routes (Active Members Only)
+@api_router.get("/members/calendar/events", response_model=List[CalendarEventResponse])
+async def get_calendar_events(
+ start_date: Optional[datetime] = None,
+ end_date: Optional[datetime] = None,
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """Get calendar events with user RSVP status"""
+ query = db.query(Event).filter(Event.published == True)
+
+ if start_date:
+ query = query.filter(Event.start_at >= start_date)
+ if end_date:
+ query = query.filter(Event.end_at <= end_date)
+
+ events = query.order_by(Event.start_at).all()
+
+ result = []
+ for event in events:
+ # Get user's RSVP status for this event
+ rsvp = db.query(EventRSVP).filter(
+ EventRSVP.event_id == event.id,
+ EventRSVP.user_id == current_user.id
+ ).first()
+
+ user_rsvp_status = rsvp.rsvp_status.value if rsvp else None
+
+ result.append(CalendarEventResponse(
+ id=str(event.id),
+ title=event.title,
+ description=event.description,
+ start_at=event.start_at,
+ end_at=event.end_at,
+ location=event.location,
+ capacity=event.capacity,
+ user_rsvp_status=user_rsvp_status,
+ microsoft_calendar_synced=event.microsoft_calendar_sync_enabled
+ ))
+
+ return result
+
+# Members Directory Route
+@api_router.get("/members/directory")
+async def get_members_directory(
+ search: Optional[str] = None,
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """Get members directory - only shows active members who opted in"""
+ query = db.query(User).filter(
+ User.show_in_directory == True,
+ User.status == UserStatus.active
+ )
+
+ if search:
+ search_term = f"%{search}%"
+ query = query.filter(
+ or_(
+ User.first_name.ilike(search_term),
+ User.last_name.ilike(search_term),
+ User.directory_bio.ilike(search_term)
+ )
+ )
+
+ members = query.order_by(User.first_name, User.last_name).all()
+
+ return [
+ {
+ "id": str(member.id),
+ "first_name": member.first_name,
+ "last_name": member.last_name,
+ "profile_photo_url": member.profile_photo_url,
+ "directory_email": member.directory_email,
+ "directory_bio": member.directory_bio,
+ "directory_address": member.directory_address,
+ "directory_phone": member.directory_phone,
+ "directory_partner_name": member.directory_partner_name,
+ "social_media_facebook": member.social_media_facebook,
+ "social_media_instagram": member.social_media_instagram,
+ "social_media_twitter": member.social_media_twitter,
+ "social_media_linkedin": member.social_media_linkedin
+ }
+ for member in members
+ ]
+
+# Admin Calendar Sync Routes
+@api_router.post("/admin/calendar/sync/{event_id}")
+async def sync_event_to_microsoft(
+ event_id: str,
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """Sync event to Microsoft Calendar"""
+ event = db.query(Event).filter(Event.id == event_id).first()
+ if not event:
+ raise HTTPException(status_code=404, detail="Event not found")
+
+ ms_calendar = get_ms_calendar_service()
+
+ try:
+ # Sync event
+ ms_event_id = await ms_calendar.sync_event(
+ loaf_event=event,
+ existing_ms_event_id=event.microsoft_calendar_id
+ )
+
+ # Update event with MS Calendar ID
+ event.microsoft_calendar_id = ms_event_id
+ event.microsoft_calendar_sync_enabled = True
+ event.updated_at = datetime.now(timezone.utc)
+
+ db.commit()
+
+ logger.info(f"Event {event.title} synced to Microsoft Calendar by {current_user.email}")
+
+ return {
+ "message": "Event synced to Microsoft Calendar successfully",
+ "microsoft_calendar_id": ms_event_id
+ }
+ except Exception as e:
+ logger.error(f"Error syncing event to Microsoft Calendar: {str(e)}")
+ raise HTTPException(status_code=500, detail=f"Sync failed: {str(e)}")
+
+@api_router.delete("/admin/calendar/unsync/{event_id}")
+async def unsync_event_from_microsoft(
+ event_id: str,
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """Remove event from Microsoft Calendar"""
+ event = db.query(Event).filter(Event.id == event_id).first()
+ if not event:
+ raise HTTPException(status_code=404, detail="Event not found")
+
+ if not event.microsoft_calendar_id:
+ raise HTTPException(status_code=400, detail="Event is not synced to Microsoft Calendar")
+
+ ms_calendar = get_ms_calendar_service()
+
+ try:
+ # Delete from Microsoft Calendar
+ await ms_calendar.delete_event(event.microsoft_calendar_id)
+
+ # Update event
+ event.microsoft_calendar_id = None
+ event.microsoft_calendar_sync_enabled = False
+ event.updated_at = datetime.now(timezone.utc)
+
+ db.commit()
+
+ logger.info(f"Event {event.title} unsynced from Microsoft Calendar by {current_user.email}")
+
+ return {"message": "Event removed from Microsoft Calendar successfully"}
+ except Exception as e:
+ logger.error(f"Error removing event from Microsoft Calendar: {str(e)}")
+ raise HTTPException(status_code=500, detail=f"Unsync failed: {str(e)}")
+
+# Event Gallery Routes (Members Only)
+@api_router.get("/members/gallery")
+async def get_events_with_galleries(
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """Get all events that have gallery images"""
+ # Get events that have at least one gallery image
+ events_with_galleries = db.query(Event).join(EventGallery).filter(
+ Event.published == True
+ ).distinct().order_by(Event.start_at.desc()).all()
+
+ result = []
+ for event in events_with_galleries:
+ gallery_count = db.query(EventGallery).filter(
+ EventGallery.event_id == event.id
+ ).count()
+
+ # Get first image as thumbnail
+ first_image = db.query(EventGallery).filter(
+ EventGallery.event_id == event.id
+ ).order_by(EventGallery.created_at).first()
+
+ result.append({
+ "id": str(event.id),
+ "title": event.title,
+ "description": event.description,
+ "start_at": event.start_at,
+ "location": event.location,
+ "gallery_count": gallery_count,
+ "thumbnail_url": first_image.image_url if first_image else None
+ })
+
+ return result
+
+@api_router.get("/events/{event_id}/gallery")
+async def get_event_gallery(
+ event_id: str,
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """Get all gallery images for a specific event"""
+ event = db.query(Event).filter(Event.id == event_id).first()
+ if not event:
+ raise HTTPException(status_code=404, detail="Event not found")
+
+ gallery_images = db.query(EventGallery).filter(
+ EventGallery.event_id == event_id
+ ).order_by(EventGallery.created_at.desc()).all()
+
+ return [
+ {
+ "id": str(img.id),
+ "image_url": img.image_url,
+ "image_key": img.image_key,
+ "caption": img.caption,
+ "uploaded_by": str(img.uploaded_by),
+ "file_size_bytes": img.file_size_bytes,
+ "created_at": img.created_at
+ }
+ for img in gallery_images
+ ]
+
+# Admin Event Gallery Routes
+@api_router.post("/admin/events/{event_id}/gallery")
+async def upload_event_gallery_image(
+ event_id: str,
+ file: UploadFile = File(...),
+ caption: Optional[str] = Form(None), # read from the multipart form alongside the file, not the query string
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """Upload image to event gallery (Admin only)"""
+ # Validate event exists
+ event = db.query(Event).filter(Event.id == event_id).first()
+ if not event:
+ raise HTTPException(status_code=404, detail="Event not found")
+
+ r2 = get_r2_storage()
+
+ # Get storage quota
+ storage = db.query(StorageUsage).first()
+ if not storage:
+ storage = StorageUsage(
+ total_bytes_used=0,
+ max_bytes_allowed=int(os.getenv('MAX_STORAGE_BYTES', 10737418240))
+ )
+ db.add(storage)
+ db.commit()
+ db.refresh(storage)
+
+ # Get max file size from env
+ max_file_size = int(os.getenv('MAX_FILE_SIZE_BYTES', 52428800))
+
+ try:
+ # Upload to R2
+ public_url, object_key, file_size = await r2.upload_file(
+ file=file,
+ folder=f"gallery/{event_id}",
+ allowed_types=r2.ALLOWED_IMAGE_TYPES,
+ max_size_bytes=max_file_size
+ )
+
+ # Check storage quota
+ if storage.total_bytes_used + file_size > storage.max_bytes_allowed:
+ # Rollback upload
+ await r2.delete_file(object_key)
+ raise HTTPException(
+ status_code=507,
+ detail=f"Storage limit exceeded. Used: {storage.total_bytes_used / (1024**3):.2f}GB, Limit: {storage.max_bytes_allowed / (1024**3):.2f}GB"
+ )
+
+ # Create gallery record
+ gallery_image = EventGallery(
+ event_id=event.id,
+ image_url=public_url,
+ image_key=object_key,
+ caption=caption,
+ uploaded_by=current_user.id,
+ file_size_bytes=file_size
+ )
+ db.add(gallery_image)
+
+ # Update storage usage
+ storage.total_bytes_used += file_size
+ storage.last_updated = datetime.now(timezone.utc)
+
+ db.commit()
+ db.refresh(gallery_image)
+
+ logger.info(f"Gallery image uploaded for event {event.title} by {current_user.email}: {file_size} bytes")
+
+ return {
+ "message": "Image uploaded successfully",
+ "image": {
+ "id": str(gallery_image.id),
+ "image_url": gallery_image.image_url,
+ "caption": gallery_image.caption,
+ "file_size_bytes": gallery_image.file_size_bytes
+ }
+ }
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error uploading gallery image: {str(e)}")
+ raise HTTPException(status_code=500, detail=f"Upload failed: {str(e)}")
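+
+# Note the ordering above: the object is uploaded first, the quota is checked
+# afterwards, and the object is deleted again on overflow. Simple, but two
+# concurrent uploads can both pass the check since StorageUsage is not locked.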
+
+@api_router.delete("/admin/event-gallery/{image_id}")
+async def delete_gallery_image(
+ image_id: str,
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """Delete image from event gallery (Admin only)"""
+ gallery_image = db.query(EventGallery).filter(EventGallery.id == image_id).first()
+ if not gallery_image:
+ raise HTTPException(status_code=404, detail="Gallery image not found")
+
+ r2 = get_r2_storage()
+ storage = db.query(StorageUsage).first()
+
+ try:
+ # Delete from R2
+ await r2.delete_file(gallery_image.image_key)
+
+ # Update storage usage
+ if storage:
+ storage.total_bytes_used -= gallery_image.file_size_bytes
+ storage.last_updated = datetime.now(timezone.utc)
+
+ # Delete from database
+ db.delete(gallery_image)
+ db.commit()
+
+ logger.info(f"Gallery image deleted by {current_user.email}: {gallery_image.image_key}")
+
+ return {"message": "Image deleted successfully"}
+ except Exception as e:
+ logger.error(f"Error deleting gallery image: {str(e)}")
+ raise HTTPException(status_code=500, detail=f"Deletion failed: {str(e)}")
+
+@api_router.put("/admin/event-gallery/{image_id}")
+async def update_gallery_image_caption(
+ image_id: str,
+ caption: str,
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """Update gallery image caption (Admin only)"""
+ gallery_image = db.query(EventGallery).filter(EventGallery.id == image_id).first()
+ if not gallery_image:
+ raise HTTPException(status_code=404, detail="Gallery image not found")
+
+ gallery_image.caption = caption
+ db.commit()
+ db.refresh(gallery_image)
+
+ return {
+ "message": "Caption updated successfully",
+ "image": {
+ "id": str(gallery_image.id),
+ "caption": gallery_image.caption
+ }
+ }
+
# Event Routes
@api_router.get("/events", response_model=List[EventResponse])
async def get_events(
@@ -601,10 +1212,339 @@ async def rsvp_to_event(
db.add(rsvp)
db.commit()
-
+
return {"message": "RSVP updated successfully"}
+
+# ============================================================================
+# Calendar Export Endpoints (Universal iCalendar .ics format)
+# ============================================================================
+
+@api_router.get("/events/{event_id}/download.ics")
+async def download_event_ics(
+ event_id: str,
+ db: Session = Depends(get_db)
+):
+ """
+ Download single event as .ics file (RFC 5545 iCalendar format)
+ No authentication required for published events
+ Works with Google Calendar, Apple Calendar, Microsoft Outlook, etc.
+ """
+ event = db.query(Event).filter(
+ Event.id == event_id,
+ Event.published == True
+ ).first()
+
+ if not event:
+ raise HTTPException(status_code=404, detail="Event not found")
+
+ # Generate UID if not exists
+ if not event.calendar_uid:
+ event.calendar_uid = calendar_service.generate_event_uid()
+ db.commit()
+
+ ics_content = calendar_service.create_single_event_calendar(event)
+
+ # Sanitize filename
+ safe_filename = "".join(c for c in event.title if c.isalnum() or c in (' ', '-', '_')).rstrip()
+ safe_filename = safe_filename.replace(' ', '_') or 'event'
+
+ return StreamingResponse(
+ iter([ics_content]),
+ media_type="text/calendar",
+ headers={
+ "Content-Disposition": f"attachment; filename={safe_filename}.ics",
+ "Cache-Control": "public, max-age=300" # Cache for 5 minutes
+ }
+ )
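+
+# For reference, the generated body is a plain RFC 5545 stream along these
+# lines (field values illustrative; UID/PRODID come from calendar_service):
+# BEGIN:VCALENDAR
+# VERSION:2.0
+# BEGIN:VEVENT
+# UID:3f2a9c...@loaf.community
+# DTSTART:20250614T230000Z
+# DTEND:20250615T020000Z
+# SUMMARY:Summer Bake-Off
+# END:VEVENT
+# END:VCALENDAR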
+
+@api_router.get("/calendars/subscribe.ics")
+async def subscribe_calendar(
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """
+ Subscribe to user's RSVP'd events (live calendar feed)
+ Auto-syncs events marked as "Yes" RSVP
+ Use webcal:// protocol for auto-sync in calendar apps
+ """
+ # Get all upcoming events user RSVP'd "yes" to
+ rsvps = db.query(EventRSVP).filter(
+ EventRSVP.user_id == current_user.id,
+ EventRSVP.rsvp_status == RSVPStatus.yes
+ ).join(Event).filter(
+ Event.start_at > datetime.now(timezone.utc),
+ Event.published == True
+ ).all()
+
+ events = [rsvp.event for rsvp in rsvps]
+
+ # Generate UIDs for events that don't have them
+ for event in events:
+ if not event.calendar_uid:
+ event.calendar_uid = calendar_service.generate_event_uid()
+ db.commit()
+
+ feed_name = f"{current_user.first_name}'s LOAF Events"
+ ics_content = calendar_service.create_subscription_feed(events, feed_name)
+
+ # Python's hash() is salted per process, so it makes an unstable ETag;
+ # use a content digest instead
+ import hashlib
+ payload = ics_content if isinstance(ics_content, bytes) else ics_content.encode()
+ etag = hashlib.md5(payload).hexdigest()
+
+ return StreamingResponse(
+ iter([ics_content]),
+ media_type="text/calendar",
+ headers={
+ "Content-Disposition": "inline; filename=loaf-events.ics",
+ "Cache-Control": "public, max-age=3600", # Cache for 1 hour
+ "ETag": f'"{etag}"'
+ }
+ )
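+
+# Calendar apps auto-sync this feed via the webcal scheme, e.g. (hypothetical
+# host): webcal://localhost:8000/membership/api/calendars/subscribe.ics
+# Caveat: webcal clients poll anonymously and cannot attach the bearer token
+# required by get_active_member, so a tokenized URL may be needed in practice.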
+
+@api_router.get("/calendars/all-events.ics")
+async def download_all_events(
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """
+ Download all upcoming published events as .ics file (one-time download)
+ Useful for importing all events at once
+ """
+ events = db.query(Event).filter(
+ Event.published == True,
+ Event.start_at > datetime.now(timezone.utc)
+ ).order_by(Event.start_at).all()
+
+ # Generate UIDs
+ for event in events:
+ if not event.calendar_uid:
+ event.calendar_uid = calendar_service.generate_event_uid()
+ db.commit()
+
+ ics_content = calendar_service.create_subscription_feed(events, "All LOAF Events")
+
+ return StreamingResponse(
+ iter([ics_content]),
+ media_type="text/calendar",
+ headers={
+ "Content-Disposition": "attachment; filename=loaf-all-events.ics",
+ "Cache-Control": "public, max-age=600" # Cache for 10 minutes
+ }
+ )
+
+# ============================================================================
+# Newsletter Archive Routes (Members Only)
+# ============================================================================
+@api_router.get("/newsletters")
+async def get_newsletters(
+ year: Optional[int] = None,
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """
+ Get all newsletters, optionally filtered by year
+ Members only
+ """
+ from models import NewsletterArchive
+ from sqlalchemy import extract # Session objects have no .func attribute
+
+ query = db.query(NewsletterArchive)
+
+ if year:
+ query = query.filter(
+ extract('year', NewsletterArchive.published_date) == year
+ )
+
+ newsletters = query.order_by(NewsletterArchive.published_date.desc()).all()
+
+ return [{
+ "id": str(n.id),
+ "title": n.title,
+ "description": n.description,
+ "published_date": n.published_date.isoformat(),
+ "document_url": n.document_url,
+ "document_type": n.document_type,
+ "file_size_bytes": n.file_size_bytes,
+ "created_at": n.created_at.isoformat()
+ } for n in newsletters]
+
+@api_router.get("/newsletters/years")
+async def get_newsletter_years(
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """
+ Get list of years that have newsletters
+ Members only
+ """
+ from models import NewsletterArchive
+ from sqlalchemy import extract, text # the Session has no .func or .text helpers
+
+ years = db.query(
+ extract('year', NewsletterArchive.published_date).label('year')
+ ).distinct().order_by(text('year DESC')).all()
+
+ return [int(y.year) for y in years]
+
+# ============================================================================
+# Financial Reports Routes (Members Only)
+# ============================================================================
+@api_router.get("/financials")
+async def get_financial_reports(
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """
+ Get all financial reports sorted by year (newest first)
+ Members only
+ """
+ from models import FinancialReport
+
+ reports = db.query(FinancialReport).order_by(
+ FinancialReport.year.desc()
+ ).all()
+
+ return [{
+ "id": str(r.id),
+ "year": r.year,
+ "title": r.title,
+ "document_url": r.document_url,
+ "document_type": r.document_type,
+ "file_size_bytes": r.file_size_bytes,
+ "created_at": r.created_at.isoformat()
+ } for r in reports]
+
+# ============================================================================
+# Bylaws Routes (Members Only)
+# ============================================================================
+@api_router.get("/bylaws/current")
+async def get_current_bylaws(
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """
+ Get current bylaws document
+ Members only
+ """
+ from models import BylawsDocument
+
+ bylaws = db.query(BylawsDocument).filter(
+ BylawsDocument.is_current == True
+ ).first()
+
+ if not bylaws:
+ raise HTTPException(status_code=404, detail="No current bylaws found")
+
+ return {
+ "id": str(bylaws.id),
+ "title": bylaws.title,
+ "version": bylaws.version,
+ "effective_date": bylaws.effective_date.isoformat(),
+ "document_url": bylaws.document_url,
+ "document_type": bylaws.document_type,
+ "file_size_bytes": bylaws.file_size_bytes,
+ "is_current": bylaws.is_current,
+ "created_at": bylaws.created_at.isoformat()
+ }
+
+@api_router.get("/bylaws/history")
+async def get_bylaws_history(
+ current_user: User = Depends(get_active_member),
+ db: Session = Depends(get_db)
+):
+ """
+ Get all bylaws versions (historical)
+ Members only
+ """
+ from models import BylawsDocument
+
+ history = db.query(BylawsDocument).order_by(
+ BylawsDocument.effective_date.desc()
+ ).all()
+
+ return [{
+ "id": str(b.id),
+ "title": b.title,
+ "version": b.version,
+ "effective_date": b.effective_date.isoformat(),
+ "document_url": b.document_url,
+ "document_type": b.document_type,
+ "file_size_bytes": b.file_size_bytes,
+ "is_current": b.is_current,
+ "created_at": b.created_at.isoformat()
+ } for b in history]
+
+# ============================================================================
+# Configuration Endpoints
+# ============================================================================
+@api_router.get("/config/limits")
+async def get_config_limits():
+ """Get configuration limits for file uploads"""
+ return {
+ "max_file_size_bytes": int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880)),
+ "max_storage_bytes": int(os.getenv('MAX_STORAGE_BYTES', 1073741824))
+ }
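+
+# With the .env.example defaults (50 MB per file, 10 GB total) this returns:
+# {"max_file_size_bytes": 52428800, "max_storage_bytes": 10737418240}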
+
+# ============================================================================
# Admin Routes
+# ============================================================================
+@api_router.get("/admin/storage/usage")
+async def get_storage_usage(
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """Get current storage usage statistics"""
+ from models import StorageUsage
+
+ storage = db.query(StorageUsage).first()
+
+ if not storage:
+ # Initialize if doesn't exist
+ storage = StorageUsage(
+ total_bytes_used=0,
+ max_bytes_allowed=int(os.getenv('MAX_STORAGE_BYTES', 10737418240)) # 10GB default, consistent with the other initializers
+ )
+ db.add(storage)
+ db.commit()
+ db.refresh(storage)
+
+ percentage = (storage.total_bytes_used / storage.max_bytes_allowed) * 100 if storage.max_bytes_allowed > 0 else 0
+
+ return {
+ "total_bytes_used": storage.total_bytes_used,
+ "max_bytes_allowed": storage.max_bytes_allowed,
+ "percentage": round(percentage, 2),
+ "available_bytes": storage.max_bytes_allowed - storage.total_bytes_used
+ }
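+
+# Worked example (made-up numbers): 2684354560 bytes used of a 10737418240-byte
+# cap gives percentage = 2684354560 / 10737418240 * 100 = 25.0 and
+# available_bytes = 8053063680.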
+
+@api_router.get("/admin/storage/breakdown")
+async def get_storage_breakdown(
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """Get storage usage breakdown by category"""
+ from sqlalchemy import func
+ from models import User, EventGallery, NewsletterArchive, FinancialReport, BylawsDocument
+
+ # Count storage by category
+ profile_photos = db.query(func.coalesce(func.sum(User.profile_photo_size), 0)).scalar() or 0
+ gallery_images = db.query(func.coalesce(func.sum(EventGallery.file_size_bytes), 0)).scalar() or 0
+ newsletters = db.query(func.coalesce(func.sum(NewsletterArchive.file_size_bytes), 0)).filter(
+ NewsletterArchive.document_type == 'upload'
+ ).scalar() or 0
+ financials = db.query(func.coalesce(func.sum(FinancialReport.file_size_bytes), 0)).filter(
+ FinancialReport.document_type == 'upload'
+ ).scalar() or 0
+ bylaws = db.query(func.coalesce(func.sum(BylawsDocument.file_size_bytes), 0)).filter(
+ BylawsDocument.document_type == 'upload'
+ ).scalar() or 0
+
+ return {
+ "breakdown": {
+ "profile_photos": profile_photos,
+ "gallery_images": gallery_images,
+ "newsletters": newsletters,
+ "financials": financials,
+ "bylaws": bylaws
+ },
+ "total": profile_photos + gallery_images + newsletters + financials + bylaws
+ }
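+
+# Each aggregate above compiles to a single query, roughly:
+# SELECT COALESCE(SUM(file_size_bytes), 0) FROM event_gallery WHERE ...;
+# (table name illustrative - the real one comes from the EventGallery model)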
+
+
@api_router.get("/admin/users")
async def get_all_users(
status: Optional[str] = None,
@@ -1308,6 +2248,462 @@ async def delete_plan(
return {"message": "Plan deactivated successfully"}
+
+# ============================================================================
+# Admin Document Management Routes
+# ============================================================================
+
+# Newsletter Archive Admin Routes
+@api_router.post("/admin/newsletters")
+async def create_newsletter(
+ title: str = Form(...),
+ description: str = Form(None),
+ published_date: str = Form(...),
+ document_type: str = Form("google_docs"),
+ document_url: str = Form(None),
+ file: Optional[UploadFile] = File(None),
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """
+ Create newsletter record
+ Admin only - supports both URL links and file uploads
+ """
+ from models import NewsletterArchive, StorageUsage
+ from r2_storage import get_r2_storage
+
+ final_url = document_url
+ file_size = None
+
+ # If file uploaded, upload to R2
+ if file and document_type == 'upload':
+ r2 = get_r2_storage()
+ public_url, object_key, file_size_bytes = await r2.upload_file(
+ file=file,
+ folder="newsletters",
+ allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
+ max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
+ )
+ final_url = public_url
+ file_size = file_size_bytes
+
+ # Update storage usage
+ storage = db.query(StorageUsage).first()
+ if storage:
+ storage.total_bytes_used += file_size
+ storage.last_updated = datetime.now(timezone.utc)
+ db.commit()
+
+ newsletter = NewsletterArchive(
+ title=title,
+ description=description,
+ published_date=datetime.fromisoformat(published_date.replace('Z', '+00:00')),
+ document_url=final_url,
+ document_type=document_type,
+ file_size_bytes=file_size,
+ created_by=current_user.id
+ )
+
+ db.add(newsletter)
+ db.commit()
+ db.refresh(newsletter)
+
+ return {
+ "id": str(newsletter.id),
+ "message": "Newsletter created successfully"
+ }
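+
+# The two supported modes as curl sketches (host/prefix assumed, values
+# illustrative):
+# link:   curl -X POST .../admin/newsletters -H "Authorization: Bearer <token>" \
+#           -F "title=January 2025" -F "published_date=2025-01-15" \
+#           -F "document_type=google_docs" -F "document_url=https://docs.google.com/..."
+# upload: same, but with -F "document_type=upload" -F "file=@jan-2025.pdf"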
+
+@api_router.put("/admin/newsletters/{newsletter_id}")
+async def update_newsletter(
+ newsletter_id: str,
+ title: str = Form(...),
+ description: str = Form(None),
+ published_date: str = Form(...),
+ document_type: str = Form("google_docs"),
+ document_url: str = Form(None),
+ file: Optional[UploadFile] = File(None),
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """
+ Update newsletter record
+ Admin only - supports both URL links and file uploads
+ """
+ from models import NewsletterArchive, StorageUsage
+ from r2_storage import get_r2_storage
+
+ newsletter = db.query(NewsletterArchive).filter(
+ NewsletterArchive.id == newsletter_id
+ ).first()
+
+ if not newsletter:
+ raise HTTPException(status_code=404, detail="Newsletter not found")
+
+ final_url = document_url
+ file_size = newsletter.file_size_bytes
+
+ # If file uploaded, upload to R2
+ if file and document_type == 'upload':
+ r2 = get_r2_storage()
+ public_url, object_key, file_size_bytes = await r2.upload_file(
+ file=file,
+ folder="newsletters",
+ allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
+ max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
+ )
+ final_url = public_url
+
+ # Update storage usage (subtract old, add new); only the counter is
+ # adjusted here - the previously uploaded object stays in R2
+ storage = db.query(StorageUsage).first()
+ if storage:
+ if newsletter.file_size_bytes:
+ storage.total_bytes_used -= newsletter.file_size_bytes
+ storage.total_bytes_used += file_size_bytes
+ storage.last_updated = datetime.now(timezone.utc)
+ db.commit()
+
+ file_size = file_size_bytes
+
+ newsletter.title = title
+ newsletter.description = description
+ newsletter.published_date = datetime.fromisoformat(published_date.replace('Z', '+00:00'))
+ newsletter.document_url = final_url
+ newsletter.document_type = document_type
+ newsletter.file_size_bytes = file_size
+ newsletter.updated_at = datetime.now(timezone.utc)
+
+ db.commit()
+
+ return {"message": "Newsletter updated successfully"}
+
+@api_router.delete("/admin/newsletters/{newsletter_id}")
+async def delete_newsletter(
+ newsletter_id: str,
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """
+ Delete newsletter record
+ Admin only
+ """
+ from models import NewsletterArchive
+
+ newsletter = db.query(NewsletterArchive).filter(
+ NewsletterArchive.id == newsletter_id
+ ).first()
+
+ if not newsletter:
+ raise HTTPException(status_code=404, detail="Newsletter not found")
+
+ # If the document was uploaded (not just linked), remove the R2 object
+ # and reclaim its quota before dropping the record, mirroring the
+ # gallery/profile-photo delete paths
+ if newsletter.document_type == 'upload' and newsletter.document_url:
+ from models import StorageUsage
+ from r2_storage import get_r2_storage
+ r2 = get_r2_storage()
+ object_key = f"newsletters/{newsletter.document_url.split('/')[-1]}"
+ try:
+ await r2.delete_file(object_key)
+ storage = db.query(StorageUsage).first()
+ if storage and newsletter.file_size_bytes:
+ storage.total_bytes_used -= newsletter.file_size_bytes
+ storage.last_updated = datetime.now(timezone.utc)
+ except Exception:
+ pass # Object may already be gone; still delete the record
+
+ db.delete(newsletter)
+ db.commit()
+
+ return {"message": "Newsletter deleted successfully"}
+
+# Financial Reports Admin Routes
+@api_router.post("/admin/financials")
+async def create_financial_report(
+ year: int = Form(...),
+ title: str = Form(...),
+ document_type: str = Form("google_drive"),
+ document_url: str = Form(None),
+ file: Optional[UploadFile] = File(None),
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """
+ Create financial report record
+ Admin only - supports both URL links and file uploads
+ """
+ from models import FinancialReport, StorageUsage
+ from r2_storage import get_r2_storage
+
+ final_url = document_url
+ file_size = None
+
+ # If file uploaded, upload to R2
+ if file and document_type == 'upload':
+ r2 = get_r2_storage()
+ public_url, object_key, file_size_bytes = await r2.upload_file(
+ file=file,
+ folder="financials",
+ allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
+ max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
+ )
+ final_url = public_url
+ file_size = file_size_bytes
+
+ # Update storage usage
+ storage = db.query(StorageUsage).first()
+ if storage:
+ storage.total_bytes_used += file_size
+ storage.last_updated = datetime.now(timezone.utc)
+ db.commit()
+
+ report = FinancialReport(
+ year=year,
+ title=title,
+ document_url=final_url,
+ document_type=document_type,
+ file_size_bytes=file_size,
+ created_by=current_user.id
+ )
+
+ db.add(report)
+ db.commit()
+ db.refresh(report)
+
+ return {
+ "id": str(report.id),
+ "message": "Financial report created successfully"
+ }
+
+@api_router.put("/admin/financials/{report_id}")
+async def update_financial_report(
+ report_id: str,
+ year: int = Form(...),
+ title: str = Form(...),
+ document_type: str = Form("google_drive"),
+ document_url: str = Form(None),
+ file: Optional[UploadFile] = File(None),
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """
+ Update financial report record
+ Admin only - supports both URL links and file uploads
+ """
+ from models import FinancialReport, StorageUsage
+ from r2_storage import get_r2_storage
+
+ report = db.query(FinancialReport).filter(
+ FinancialReport.id == report_id
+ ).first()
+
+ if not report:
+ raise HTTPException(status_code=404, detail="Financial report not found")
+
+ final_url = document_url
+ file_size = report.file_size_bytes
+
+ # If file uploaded, upload to R2
+ if file and document_type == 'upload':
+ r2 = get_r2_storage()
+ public_url, object_key, file_size_bytes = await r2.upload_file(
+ file=file,
+ folder="financials",
+ allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
+ max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
+ )
+ final_url = public_url
+
+ # Update storage usage (subtract old, add new); only the counter is
+ # adjusted here - the previously uploaded object stays in R2
+ storage = db.query(StorageUsage).first()
+ if storage:
+ if report.file_size_bytes:
+ storage.total_bytes_used -= report.file_size_bytes
+ storage.total_bytes_used += file_size_bytes
+ storage.last_updated = datetime.now(timezone.utc)
+ db.commit()
+
+ file_size = file_size_bytes
+
+ report.year = year
+ report.title = title
+ report.document_url = final_url
+ report.document_type = document_type
+ report.file_size_bytes = file_size
+ report.updated_at = datetime.now(timezone.utc)
+
+ db.commit()
+
+ return {"message": "Financial report updated successfully"}
+
+@api_router.delete("/admin/financials/{report_id}")
+async def delete_financial_report(
+ report_id: str,
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """
+ Delete financial report record
+ Admin only
+ """
+ from models import FinancialReport
+
+ report = db.query(FinancialReport).filter(
+ FinancialReport.id == report_id
+ ).first()
+
+ if not report:
+ raise HTTPException(status_code=404, detail="Financial report not found")
+
+ # Clean up an uploaded document from R2 and reclaim its quota, as in
+ # the newsletter delete path
+ if report.document_type == 'upload' and report.document_url:
+ from models import StorageUsage
+ from r2_storage import get_r2_storage
+ r2 = get_r2_storage()
+ object_key = f"financials/{report.document_url.split('/')[-1]}"
+ try:
+ await r2.delete_file(object_key)
+ storage = db.query(StorageUsage).first()
+ if storage and report.file_size_bytes:
+ storage.total_bytes_used -= report.file_size_bytes
+ storage.last_updated = datetime.now(timezone.utc)
+ except Exception:
+ pass # Object may already be gone; still delete the record
+
+ db.delete(report)
+ db.commit()
+
+ return {"message": "Financial report deleted successfully"}
+
+# Bylaws Admin Routes
+@api_router.post("/admin/bylaws")
+async def create_bylaws(
+ title: str = Form(...),
+ version: str = Form(...),
+ effective_date: str = Form(...),
+ document_type: str = Form("google_drive"),
+ document_url: str = Form(None),
+ is_current: bool = Form(True),
+ file: Optional[UploadFile] = File(None),
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """
+ Create bylaws document
+ If is_current=True, sets all others to is_current=False
+ Admin only - supports both URL links and file uploads
+ """
+ from models import BylawsDocument, StorageUsage
+ from r2_storage import get_r2_storage
+
+ final_url = document_url
+ file_size = None
+
+ # If file uploaded, upload to R2
+ if file and document_type == 'upload':
+ r2 = get_r2_storage()
+ public_url, object_key, file_size_bytes = await r2.upload_file(
+ file=file,
+ folder="bylaws",
+ allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
+ max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
+ )
+ final_url = public_url
+ file_size = file_size_bytes
+
+ # Update storage usage
+ storage = db.query(StorageUsage).first()
+ if storage:
+ storage.total_bytes_used += file_size
+ storage.last_updated = datetime.now(timezone.utc)
+ db.commit()
+
+ if is_current:
+ # Set all other bylaws to not current
+ db.query(BylawsDocument).update({"is_current": False})
+
+ bylaws = BylawsDocument(
+ title=title,
+ version=version,
+ effective_date=datetime.fromisoformat(effective_date.replace('Z', '+00:00')),
+ document_url=final_url,
+ document_type=document_type,
+ is_current=is_current,
+ file_size_bytes=file_size,
+ created_by=current_user.id
+ )
+
+ db.add(bylaws)
+ db.commit()
+ db.refresh(bylaws)
+
+ return {
+ "id": str(bylaws.id),
+ "message": "Bylaws created successfully"
+ }
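+
+# Invariant maintained by the create endpoint above and the update endpoint
+# below: at most one BylawsDocument row carries is_current=True. Saving with
+# is_current=True first flips every other row to False, which is exactly what
+# the members' /bylaws/current query relies on.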
+
+@api_router.put("/admin/bylaws/{bylaws_id}")
+async def update_bylaws(
+ bylaws_id: str,
+ title: str = Form(...),
+ version: str = Form(...),
+ effective_date: str = Form(...),
+ document_type: str = Form("google_drive"),
+ document_url: str = Form(None),
+ is_current: bool = Form(False),
+ file: Optional[UploadFile] = File(None),
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """
+ Update bylaws document
+ If is_current=True, sets all others to is_current=False
+ Admin only - supports both URL links and file uploads
+ """
+ from models import BylawsDocument, StorageUsage
+ from r2_storage import get_r2_storage
+
+ bylaws = db.query(BylawsDocument).filter(
+ BylawsDocument.id == bylaws_id
+ ).first()
+
+ if not bylaws:
+ raise HTTPException(status_code=404, detail="Bylaws not found")
+
+ final_url = document_url
+ file_size = bylaws.file_size_bytes
+
+ # If file uploaded, upload to R2
+ if file and document_type == 'upload':
+ r2 = get_r2_storage()
+ public_url, object_key, file_size_bytes = await r2.upload_file(
+ file=file,
+ folder="bylaws",
+ allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
+ max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
+ )
+ final_url = public_url
+
+ # Update storage usage (subtract old, add new); only the counter is
+ # adjusted here - the previously uploaded object stays in R2
+ storage = db.query(StorageUsage).first()
+ if storage:
+ if bylaws.file_size_bytes:
+ storage.total_bytes_used -= bylaws.file_size_bytes
+ storage.total_bytes_used += file_size_bytes
+ storage.last_updated = datetime.now(timezone.utc)
+ db.commit()
+
+ file_size = file_size_bytes
+
+ if is_current:
+ # Set all other bylaws to not current
+ db.query(BylawsDocument).filter(
+ BylawsDocument.id != bylaws_id
+ ).update({"is_current": False})
+
+ bylaws.title = title
+ bylaws.version = version
+ bylaws.effective_date = datetime.fromisoformat(effective_date.replace('Z', '+00:00'))
+ bylaws.document_url = final_url
+ bylaws.document_type = document_type
+ bylaws.is_current = is_current
+ bylaws.file_size_bytes = file_size
+ bylaws.updated_at = datetime.now(timezone.utc) # the other update endpoints set this too
+
+ db.commit()
+
+ return {"message": "Bylaws updated successfully"}
+
+@api_router.delete("/admin/bylaws/{bylaws_id}")
+async def delete_bylaws(
+ bylaws_id: str,
+ current_user: User = Depends(get_current_admin_user),
+ db: Session = Depends(get_db)
+):
+ """
+ Delete bylaws document
+ Admin only
+ """
+ from models import BylawsDocument
+
+ bylaws = db.query(BylawsDocument).filter(
+ BylawsDocument.id == bylaws_id
+ ).first()
+
+ if not bylaws:
+ raise HTTPException(status_code=404, detail="Bylaws not found")
+
+ # Clean up an uploaded document from R2 and reclaim its quota, as in
+ # the newsletter and financial-report delete paths
+ if bylaws.document_type == 'upload' and bylaws.document_url:
+ from models import StorageUsage
+ from r2_storage import get_r2_storage
+ r2 = get_r2_storage()
+ object_key = f"bylaws/{bylaws.document_url.split('/')[-1]}"
+ try:
+ await r2.delete_file(object_key)
+ storage = db.query(StorageUsage).first()
+ if storage and bylaws.file_size_bytes:
+ storage.total_bytes_used -= bylaws.file_size_bytes
+ storage.last_updated = datetime.now(timezone.utc)
+ except Exception:
+ pass # Object may already be gone; still delete the record
+
+ db.delete(bylaws)
+ db.commit()
+
+ return {"message": "Bylaws deleted successfully"}
+
@api_router.post("/subscriptions/checkout")
async def create_checkout(
request: CheckoutRequest,