from fastapi import FastAPI, APIRouter, Depends, HTTPException, status, Request, UploadFile, File, Form, Path as PathParam
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from sqlalchemy.orm import Session
from sqlalchemy import or_
from pydantic import BaseModel, EmailStr, Field, validator
from typing import List, Optional, Literal
from datetime import datetime, timedelta, timezone
from dotenv import load_dotenv
from pathlib import Path
from contextlib import asynccontextmanager
import os
import logging
import uuid
import secrets
import csv
import io

from database import engine, get_db, Base
from models import User, Event, EventRSVP, UserStatus, UserRole, RSVPStatus, SubscriptionPlan, Subscription, SubscriptionStatus, StorageUsage, EventGallery, NewsletterArchive, FinancialReport, BylawsDocument, Permission, RolePermission, Role, UserInvitation, InvitationStatus, ImportJob, ImportJobStatus, ImportRollbackAudit, Donation, DonationType, DonationStatus, SystemSettings, PaymentMethod, PaymentMethodType
from auth import (
    get_password_hash,
    verify_password,
    create_access_token,
    get_current_user,
    get_current_admin_user,
    get_current_superadmin,
    get_active_member,
    get_user_permissions,
    require_permission,
    create_password_reset_token,
    verify_reset_token,
    get_user_role_code
)
from email_service import (
    send_verification_email,
    send_approval_notification,
    send_payment_prompt_email,
    send_password_reset_email,
    send_admin_password_reset_email
)
from payment_service import create_checkout_session, verify_webhook_signature, get_subscription_end_date
from r2_storage import get_r2_storage
from calendar_service import CalendarService
from wordpress_parser import analyze_csv, format_preview_for_display

# Load environment variables
ROOT_DIR = Path(__file__).parent
load_dotenv(ROOT_DIR / '.env')

# Create database tables
Base.metadata.create_all(bind=engine)

# Lifespan event handler (replaces deprecated on_event)
@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup
    logger.info("Application started")
    yield
    # Shutdown
    logger.info("Application shutdown")

# Environment detection
ENVIRONMENT = os.environ.get('ENVIRONMENT', 'development')
IS_PRODUCTION = ENVIRONMENT == 'production'

# Security: Disable API documentation in production
if IS_PRODUCTION:
    print("🔒 Production mode: API documentation disabled")
    app_config = {
        "lifespan": lifespan,
        "root_path": "/membership",
        "docs_url": None,     # Disable /docs
        "redoc_url": None,    # Disable /redoc
        "openapi_url": None   # Disable /openapi.json
    }
else:
    print("🔓 Development mode: API documentation enabled at /docs and /redoc")
    app_config = {
        "lifespan": lifespan,
        "root_path": "/membership"
    }

# Create the main app
app = FastAPI(**app_config)

# Create a router with the /api prefix
api_router = APIRouter(prefix="/api")

# Initialize calendar service
calendar_service = CalendarService()

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

# ============================================================
# Health Check Endpoint (for Kubernetes probes)
# ============================================================

@app.get("/health")
async def health_check():
    """Health check endpoint for Kubernetes liveness/readiness probes."""
    return {"status": "healthy", "service": "membership-backend"}

# ============================================================
# Helper Functions
# ============================================================

def set_user_role(user: User, role_enum: UserRole, db: Session):
    """
    Set user's role in both the legacy enum and the dynamic role_id.
    Ensures consistency between the old and new role systems during the Phase 3 migration.

    Args:
        user: User object to update
        role_enum: UserRole enum value
        db: Database session
    """
    # Set legacy enum
    user.role = role_enum

    # Set dynamic role_id
    role = db.query(Role).filter(Role.code == role_enum.value).first()
    if role:
        user.role_id = role.id
    else:
        logger.warning(f"Role not found for code: {role_enum.value}")

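# Illustrative usage sketch (not part of the application): set_user_role keeps the legacy
# User.role enum and the dynamic User.role_id in sync, so callers make a single call when
# changing a user's role and commit afterwards. The email and UserRole value below are
# assumed placeholders.
#
#     user = db.query(User).filter(User.email == "member@example.com").first()
#     set_user_role(user, UserRole.admin, db)
#     db.commit()
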
# ============================================================
# Pydantic Models
# ============================================================

class RegisterRequest(BaseModel):
    """Dynamic registration request - validates against the registration schema"""

    # Fixed required fields (always present)
    first_name: str
    last_name: str
    email: EmailStr
    password: str = Field(min_length=6)
    accepts_tos: bool = False

    # Step 1: Personal & Partner Information (optional for dynamic schema)
    phone: Optional[str] = None
    address: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    zipcode: Optional[str] = None
    date_of_birth: Optional[datetime] = None
    lead_sources: Optional[List[str]] = None
    partner_first_name: Optional[str] = None
    partner_last_name: Optional[str] = None
    partner_is_member: Optional[bool] = False
    partner_plan_to_become_member: Optional[bool] = False

    # Step 2: Newsletter, Volunteer & Scholarship
    referred_by_member_name: Optional[str] = None
    newsletter_publish_name: Optional[bool] = False
    newsletter_publish_photo: Optional[bool] = False
    newsletter_publish_birthday: Optional[bool] = False
    newsletter_publish_none: Optional[bool] = False
    volunteer_interests: Optional[List[str]] = []
    scholarship_requested: Optional[bool] = False
    scholarship_reason: Optional[str] = None

    # Step 3: Directory Settings
    show_in_directory: Optional[bool] = False
    directory_email: Optional[str] = None
    directory_bio: Optional[str] = None
    directory_address: Optional[str] = None
    directory_phone: Optional[str] = None
    directory_dob: Optional[datetime] = None
    directory_partner_name: Optional[str] = None

    # Allow extra fields for custom registration data
    class Config:
        extra = 'allow'

    @validator('accepts_tos')
    def tos_must_be_accepted(cls, v):
        if not v:
            raise ValueError('You must accept the Terms of Service to register')
        return v

class LoginRequest(BaseModel):
    email: EmailStr
    password: str

class LoginResponse(BaseModel):
    access_token: str
    token_type: str
    user: dict

class ForgotPasswordRequest(BaseModel):
    email: EmailStr

class ResetPasswordRequest(BaseModel):
    token: str
    new_password: str = Field(min_length=6)

class ChangePasswordRequest(BaseModel):
    current_password: str
    new_password: str = Field(min_length=6)

class AdminPasswordUpdateRequest(BaseModel):
    force_change: bool = True

class UserResponse(BaseModel):
    id: str
    email: str
    first_name: str
    last_name: str
    phone: Optional[str] = None
    address: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    zipcode: Optional[str] = None
    date_of_birth: Optional[datetime] = None
    status: str
    role: str
    email_verified: bool
    created_at: datetime
    member_since: Optional[datetime] = None  # Date when user became an active member
    # Profile
    profile_photo_url: Optional[str] = None
    # Subscription info (optional)
    subscription_start_date: Optional[datetime] = None
    subscription_end_date: Optional[datetime] = None
    subscription_status: Optional[str] = None
    # Partner information
    partner_first_name: Optional[str] = None
    partner_last_name: Optional[str] = None
    partner_is_member: Optional[bool] = None
    partner_plan_to_become_member: Optional[bool] = None
    # Newsletter preferences
    newsletter_publish_name: Optional[bool] = None
    newsletter_publish_photo: Optional[bool] = None
    newsletter_publish_birthday: Optional[bool] = None
    newsletter_publish_none: Optional[bool] = None
    # Volunteer interests
    volunteer_interests: Optional[list] = None
    # Directory settings
    show_in_directory: Optional[bool] = None
    directory_email: Optional[str] = None
    directory_bio: Optional[str] = None
    directory_address: Optional[str] = None
    directory_phone: Optional[str] = None
    directory_dob: Optional[datetime] = None
    directory_partner_name: Optional[str] = None

    model_config = {"from_attributes": True}

    @validator('id', 'status', 'role', pre=True)
    def convert_to_string(cls, v):
        """Convert UUID and Enum types to strings"""
        if hasattr(v, 'value'):
            return v.value
        return str(v)

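# Illustrative note (assumption, not part of the application): because of
# model_config = {"from_attributes": True} and the pre=True validator above,
# UserResponse.model_validate() can take an ORM User directly; a UUID primary key is
# rendered as its string form and enum columns as their .value (e.g. UserStatus.active -> "active").
#
#     user = db.query(User).first()
#     payload = UserResponse.model_validate(user)
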
class UpdateProfileRequest(BaseModel):
    # Basic personal information
    first_name: Optional[str] = None
    last_name: Optional[str] = None
    phone: Optional[str] = None
    address: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    zipcode: Optional[str] = None

    # Partner information
    partner_first_name: Optional[str] = None
    partner_last_name: Optional[str] = None
    partner_is_member: Optional[bool] = None
    partner_plan_to_become_member: Optional[bool] = None

    # Newsletter preferences
    newsletter_publish_name: Optional[bool] = None
    newsletter_publish_photo: Optional[bool] = None
    newsletter_publish_birthday: Optional[bool] = None
    newsletter_publish_none: Optional[bool] = None

    # Volunteer interests (array of strings)
    volunteer_interests: Optional[list] = None

    # Directory settings
    show_in_directory: Optional[bool] = None
    directory_email: Optional[str] = None
    directory_bio: Optional[str] = None
    directory_address: Optional[str] = None
    directory_phone: Optional[str] = None
    directory_dob: Optional[datetime] = None
    directory_partner_name: Optional[str] = None

    @validator('directory_dob', pre=True)
    def empty_str_to_none(cls, v):
        """Convert empty string to None for optional datetime field"""
        if v == '' or v is None:
            return None
        return v

class EnhancedProfileUpdateRequest(BaseModel):
    """Members Only - Enhanced profile update with social media and directory settings"""
    social_media_facebook: Optional[str] = None
    social_media_instagram: Optional[str] = None
    social_media_twitter: Optional[str] = None
    social_media_linkedin: Optional[str] = None
    show_in_directory: Optional[bool] = None
    directory_email: Optional[str] = None
    directory_bio: Optional[str] = None
    directory_address: Optional[str] = None
    directory_phone: Optional[str] = None
    directory_dob: Optional[datetime] = None
    directory_partner_name: Optional[str] = None

    @validator('directory_dob', pre=True)
    def empty_str_to_none(cls, v):
        """Convert empty string to None for optional datetime field"""
        if v == '' or v is None:
            return None
        return v

class CalendarEventResponse(BaseModel):
    """Calendar view response with user RSVP status"""
    id: str
    title: str
    description: Optional[str]
    start_at: datetime
    end_at: datetime
    location: str
    capacity: Optional[int]
    user_rsvp_status: Optional[str] = None
    microsoft_calendar_synced: bool

class SyncEventRequest(BaseModel):
    """Request to sync event to Microsoft Calendar"""
    event_id: str

class EventCreate(BaseModel):
    title: str
    description: Optional[str] = None
    start_at: datetime
    end_at: datetime
    location: str
    capacity: Optional[int] = None
    published: bool = False

class EventUpdate(BaseModel):
    title: Optional[str] = None
    description: Optional[str] = None
    start_at: Optional[datetime] = None
    end_at: Optional[datetime] = None
    location: Optional[str] = None
    capacity: Optional[int] = None
    published: Optional[bool] = None

class EventResponse(BaseModel):
    id: str
    title: str
    description: Optional[str]
    start_at: datetime
    end_at: datetime
    location: str
    capacity: Optional[int]
    published: bool
    created_by: str
    created_at: datetime
    rsvp_count: Optional[int] = 0
    user_rsvp_status: Optional[str] = None

    model_config = {"from_attributes": True}

class RSVPRequest(BaseModel):
    rsvp_status: str

class AttendanceUpdate(BaseModel):
    user_id: str
    attended: bool

class BatchAttendanceUpdate(BaseModel):
    updates: list[AttendanceUpdate]

class UpdateUserStatusRequest(BaseModel):
    status: str

class ManualPaymentRequest(BaseModel):
    plan_id: str = Field(..., description="Subscription plan ID")
    amount_cents: int = Field(..., ge=3000, description="Payment amount in cents (minimum $30)")
    payment_date: datetime = Field(..., description="Date payment was received")
    payment_method: str = Field(..., description="Payment method: cash, bank_transfer, check, other")
    use_custom_period: bool = Field(False, description="Whether to use custom dates instead of plan's billing cycle")
    custom_period_start: Optional[datetime] = Field(None, description="Custom subscription start date")
    custom_period_end: Optional[datetime] = Field(None, description="Custom subscription end date")
    override_plan_dates: bool = Field(False, description="Override plan's custom billing cycle with admin-specified dates")
    notes: Optional[str] = Field(None, description="Admin notes about payment")

    @validator('amount_cents')
    def validate_amount(cls, v):
        if v < 3000:
            raise ValueError('Amount must be at least $30 (3000 cents)')
        return v

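# Illustrative sketch (not part of the application): both the Field(ge=3000) constraint and
# the validate_amount validator reject payments under $30, so constructing the model with a
# smaller amount raises a validation error. The values shown are made up.
#
#     ManualPaymentRequest(
#         plan_id="plan-annual",
#         amount_cents=2500,                  # below the $30 minimum -> ValidationError
#         payment_date=datetime.now(timezone.utc),
#         payment_method="cash",
#     )
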
# ============================================================
# Permission Management Pydantic Models
# ============================================================

class PermissionResponse(BaseModel):
    id: str
    code: str
    name: str
    description: Optional[str]
    module: str
    created_at: datetime

    class Config:
        from_attributes = True

class AssignPermissionsRequest(BaseModel):
    permission_codes: List[str] = Field(..., description="List of permission codes to assign to the role")

# ============================================================
# Role Management Pydantic Models
# ============================================================

class RoleResponse(BaseModel):
    id: str
    code: str
    name: str
    description: Optional[str]
    is_system_role: bool
    created_at: datetime
    updated_at: datetime
    permission_count: Optional[int] = 0  # Number of permissions assigned to this role

    class Config:
        from_attributes = True

class CreateRoleRequest(BaseModel):
    code: str = Field(..., min_length=2, max_length=50, description="Unique role code (e.g., 'finance', 'editor')")
    name: str = Field(..., min_length=2, max_length=100, description="Display name (e.g., 'Finance Manager')")
    description: Optional[str] = Field(None, description="Role description")
    permission_codes: List[str] = Field(default=[], description="List of permission codes to assign")

class UpdateRoleRequest(BaseModel):
    name: Optional[str] = Field(None, min_length=2, max_length=100)
    description: Optional[str] = None

class AssignRolePermissionsRequest(BaseModel):
    permission_codes: List[str] = Field(..., description="List of permission codes to assign to the role")

# ============================================================
# User Creation & Invitation Pydantic Models
# ============================================================

class CreateUserRequest(BaseModel):
    email: EmailStr
    password: str = Field(..., min_length=8)
    first_name: str
    last_name: str
    phone: str
    role: str  # "member", "admin", "superadmin"

    # Optional member fields
    address: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    zipcode: Optional[str] = None
    date_of_birth: Optional[datetime] = None
    member_since: Optional[datetime] = None

class InviteUserRequest(BaseModel):
    email: EmailStr
    role: str  # "member", "admin", "superadmin"

    # Optional pre-fill information
    first_name: Optional[str] = None
    last_name: Optional[str] = None
    phone: Optional[str] = None

class AdminUpdateUserRequest(BaseModel):
    """Admin-only request model for updating user profile fields"""
    first_name: Optional[str] = None
    last_name: Optional[str] = None
    phone: Optional[str] = None
    address: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    zipcode: Optional[str] = None
    date_of_birth: Optional[datetime] = None
    member_since: Optional[datetime] = None
    # Partner information
    partner_first_name: Optional[str] = None
    partner_last_name: Optional[str] = None
    partner_is_member: Optional[bool] = None
    partner_plan_to_become_member: Optional[bool] = None
    referred_by_member_name: Optional[str] = None

    @validator('date_of_birth', 'member_since', pre=True)
    def empty_str_to_none(cls, v):
        """Convert empty string to None for optional datetime fields"""
        if v == '' or v is None:
            return None
        return v

class InvitationResponse(BaseModel):
    id: str
    email: str
    role: str
    status: str
    first_name: Optional[str]
    last_name: Optional[str]
    phone: Optional[str]
    invited_by: str
    invited_at: datetime
    expires_at: datetime
    accepted_at: Optional[datetime]

    class Config:
        from_attributes = True

class AcceptInvitationRequest(BaseModel):
    token: str
    password: str = Field(..., min_length=8)

    # Complete profile information
    first_name: str
    last_name: str
    phone: str

    # Member-specific fields (optional for staff)
    address: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    zipcode: Optional[str] = None
    date_of_birth: Optional[datetime] = None

class ChangeRoleRequest(BaseModel):
    role: str
    role_id: Optional[str] = None  # For custom roles

# Auth Routes
@api_router.post("/auth/register")
async def register(request: RegisterRequest, db: Session = Depends(get_db)):
    # Check if email already exists
    existing_user = db.query(User).filter(User.email == request.email).first()
    if existing_user:
        raise HTTPException(status_code=400, detail="Email already registered")

    # Get registration schema for dynamic validation
    schema = get_registration_schema(db)

    # Convert request to dict for dynamic validation
    request_data = request.dict(exclude_unset=False)

    # Perform dynamic schema validation
    is_valid, validation_errors = validate_dynamic_registration(request_data, schema)
    if not is_valid:
        raise HTTPException(
            status_code=400,
            detail={"message": "Validation failed", "errors": validation_errors}
        )

    # Split data into User model fields and custom fields
    user_data, custom_data = split_registration_data(request_data, schema)

    # Generate verification token
    verification_token = secrets.token_urlsafe(32)

    # Create user with known fields
    user = User(
        # Account credentials (Step 4)
        email=request.email,
        password_hash=get_password_hash(request.password),

        # Personal information (Step 1)
        first_name=request.first_name,
        last_name=request.last_name,
        phone=user_data.get('phone') or request.phone,
        address=user_data.get('address') or request.address,
        city=user_data.get('city') or request.city,
        state=user_data.get('state') or request.state,
        zipcode=user_data.get('zipcode') or request.zipcode,
        date_of_birth=user_data.get('date_of_birth') or request.date_of_birth,
        lead_sources=user_data.get('lead_sources') or request.lead_sources or [],

        # Partner information (Step 1)
        partner_first_name=user_data.get('partner_first_name') or request.partner_first_name,
        partner_last_name=user_data.get('partner_last_name') or request.partner_last_name,
        partner_is_member=user_data.get('partner_is_member', request.partner_is_member) or False,
        partner_plan_to_become_member=user_data.get('partner_plan_to_become_member', request.partner_plan_to_become_member) or False,

        # Referral (Step 2)
        referred_by_member_name=user_data.get('referred_by_member_name') or request.referred_by_member_name,

        # Newsletter publication preferences (Step 2)
        newsletter_publish_name=user_data.get('newsletter_publish_name', request.newsletter_publish_name) or False,
        newsletter_publish_photo=user_data.get('newsletter_publish_photo', request.newsletter_publish_photo) or False,
        newsletter_publish_birthday=user_data.get('newsletter_publish_birthday', request.newsletter_publish_birthday) or False,
        newsletter_publish_none=user_data.get('newsletter_publish_none', request.newsletter_publish_none) or False,

        # Volunteer interests (Step 2)
        volunteer_interests=user_data.get('volunteer_interests') or request.volunteer_interests or [],

        # Scholarship (Step 2)
        scholarship_requested=user_data.get('scholarship_requested', request.scholarship_requested) or False,
        scholarship_reason=user_data.get('scholarship_reason') or request.scholarship_reason,

        # Directory settings (Step 3)
        show_in_directory=user_data.get('show_in_directory', request.show_in_directory) or False,
        directory_email=user_data.get('directory_email') or request.directory_email,
        directory_bio=user_data.get('directory_bio') or request.directory_bio,
        directory_address=user_data.get('directory_address') or request.directory_address,
        directory_phone=user_data.get('directory_phone') or request.directory_phone,
        directory_dob=user_data.get('directory_dob') or request.directory_dob,
        directory_partner_name=user_data.get('directory_partner_name') or request.directory_partner_name,

        # Terms of Service acceptance (Step 4)
        accepts_tos=request.accepts_tos,
        tos_accepted_at=datetime.now(timezone.utc) if request.accepts_tos else None,

        # Custom registration data for dynamic fields
        custom_registration_data=custom_data if custom_data else {},

        # Status fields
        status=UserStatus.pending_email,
        role=UserRole.guest,
        email_verified=False,
        email_verification_token=verification_token
    )

    db.add(user)
    db.commit()
    db.refresh(user)

    # Send verification email
    await send_verification_email(user.email, verification_token)

    logger.info(f"User registered: {user.email}")

    return {"message": "Registration successful. Please check your email to verify your account."}

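# Illustrative client sketch (assumption, not part of the application): a minimal
# registration call against this endpoint. Only the fixed required fields are shown;
# extra keys are accepted because RegisterRequest allows extra fields for the dynamic
# schema. The base URL and values are hypothetical.
#
#     import httpx
#     httpx.post(
#         "http://localhost:8000/membership/api/auth/register",
#         json={
#             "first_name": "Jane",
#             "last_name": "Doe",
#             "email": "jane@example.com",
#             "password": "s3cret-pw",
#             "accepts_tos": True,
#         },
#     )
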
@api_router.get("/auth/verify-email")
|
|
async def verify_email(token: str, db: Session = Depends(get_db)):
|
|
"""Verify user email with token (idempotent - safe to call multiple times)"""
|
|
user = db.query(User).filter(User.email_verification_token == token).first()
|
|
|
|
if not user:
|
|
raise HTTPException(status_code=400, detail="Invalid verification token")
|
|
|
|
# If user is already verified, return success (idempotent behavior)
|
|
# This handles React Strict Mode's double-execution in development
|
|
if user.email_verified:
|
|
logger.info(f"Email already verified for user: {user.email}")
|
|
return {
|
|
"message": "Email is already verified",
|
|
"status": user.status.value
|
|
}
|
|
|
|
# Proceed with first-time verification
|
|
# Check if referred by current member - skip validation requirement
|
|
if user.referred_by_member_name:
|
|
referrer = db.query(User).filter(
|
|
or_(
|
|
User.first_name + ' ' + User.last_name == user.referred_by_member_name,
|
|
User.email == user.referred_by_member_name
|
|
),
|
|
User.status == UserStatus.active
|
|
).first()
|
|
|
|
if referrer:
|
|
user.status = UserStatus.pre_validated
|
|
else:
|
|
user.status = UserStatus.pending_validation
|
|
else:
|
|
user.status = UserStatus.pending_validation
|
|
|
|
user.email_verified = True
|
|
# Don't clear token immediately - keeps endpoint idempotent for React StrictMode double-calls
|
|
# Token will be cleared on first successful login
|
|
|
|
db.commit()
|
|
db.refresh(user)
|
|
|
|
logger.info(f"Email verified for user: {user.email}")
|
|
|
|
return {"message": "Email verified successfully", "status": user.status.value}
|
|
|
|
@api_router.post("/auth/resend-verification-email")
|
|
async def resend_verification_email(
|
|
current_user: User = Depends(get_current_user),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""User requests to resend their verification email"""
|
|
|
|
# Check if email already verified
|
|
if current_user.email_verified:
|
|
raise HTTPException(status_code=400, detail="Email is already verified")
|
|
|
|
# Generate new token
|
|
verification_token = secrets.token_urlsafe(32)
|
|
current_user.email_verification_token = verification_token
|
|
db.commit()
|
|
|
|
# Send verification email
|
|
await send_verification_email(current_user.email, verification_token)
|
|
|
|
logger.info(f"Verification email resent to: {current_user.email}")
|
|
|
|
return {"message": "Verification email has been resent. Please check your inbox."}
|
|
|
|
@api_router.post("/auth/login", response_model=LoginResponse)
|
|
async def login(request: LoginRequest, db: Session = Depends(get_db)):
|
|
user = db.query(User).filter(User.email == request.email).first()
|
|
|
|
if not user or not verify_password(request.password, user.password_hash):
|
|
raise HTTPException(
|
|
status_code=status.HTTP_401_UNAUTHORIZED,
|
|
detail="Incorrect email or password"
|
|
)
|
|
|
|
access_token = create_access_token(data={"sub": str(user.id)})
|
|
|
|
# Clear verification token on first successful login after verification
|
|
# Don't let this fail the login if database commit fails
|
|
if user.email_verified and user.email_verification_token:
|
|
try:
|
|
user.email_verification_token = None
|
|
db.commit()
|
|
except Exception as e:
|
|
logger.warning(f"Failed to clear verification token for user {user.id}: {str(e)}")
|
|
db.rollback()
|
|
# Continue with login - this is not critical
|
|
|
|
return {
|
|
"access_token": access_token,
|
|
"token_type": "bearer",
|
|
"user": {
|
|
"id": str(user.id),
|
|
"email": user.email,
|
|
"first_name": user.first_name,
|
|
"last_name": user.last_name,
|
|
"status": user.status.value,
|
|
"role": get_user_role_code(user),
|
|
"email_verified": user.email_verified,
|
|
"force_password_change": user.force_password_change
|
|
}
|
|
}
|
|
|
|
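# Illustrative client sketch (assumption, not part of the application): logging in and
# reusing the returned bearer token on an authenticated endpoint. Base URL and credentials
# are hypothetical.
#
#     import httpx
#     resp = httpx.post(
#         "http://localhost:8000/membership/api/auth/login",
#         json={"email": "jane@example.com", "password": "s3cret-pw"},
#     )
#     token = resp.json()["access_token"]
#     me = httpx.get(
#         "http://localhost:8000/membership/api/auth/me",
#         headers={"Authorization": f"Bearer {token}"},
#     )
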
@api_router.post("/auth/forgot-password")
|
|
async def forgot_password(request: ForgotPasswordRequest, db: Session = Depends(get_db)):
|
|
"""Request password reset - sends email with reset link"""
|
|
user = db.query(User).filter(User.email == request.email).first()
|
|
|
|
# Always return success (security: don't reveal if email exists)
|
|
if user:
|
|
token = create_password_reset_token(user, db)
|
|
reset_url = f"{os.getenv('FRONTEND_URL')}/reset-password?token={token}"
|
|
|
|
await send_password_reset_email(user.email, user.first_name, reset_url)
|
|
|
|
return {"message": "If email exists, reset link has been sent"}
|
|
|
|
@api_router.post("/auth/reset-password")
|
|
async def reset_password(request: ResetPasswordRequest, db: Session = Depends(get_db)):
|
|
"""Complete password reset using token"""
|
|
user = verify_reset_token(request.token, db)
|
|
|
|
if not user:
|
|
raise HTTPException(status_code=400, detail="Invalid or expired reset token")
|
|
|
|
# Update password
|
|
user.password_hash = get_password_hash(request.new_password)
|
|
user.password_reset_token = None
|
|
user.password_reset_expires = None
|
|
user.force_password_change = False # Reset flag if it was set
|
|
db.commit()
|
|
|
|
return {"message": "Password reset successful"}
|
|
|
|
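# Illustrative two-step flow (assumption, not part of the application): the reset token is
# emailed by /auth/forgot-password and then exchanged at /auth/reset-password. The token
# value below is a placeholder.
#
#     import httpx
#     httpx.post("http://localhost:8000/membership/api/auth/forgot-password",
#                json={"email": "jane@example.com"})
#     # ...user copies the token from the emailed reset link...
#     httpx.post("http://localhost:8000/membership/api/auth/reset-password",
#                json={"token": "<token-from-email>", "new_password": "new-s3cret"})
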
@api_router.put("/users/change-password")
|
|
async def change_password(
|
|
request: ChangePasswordRequest,
|
|
current_user: User = Depends(get_current_user),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""User changes their own password"""
|
|
# Verify current password
|
|
if not verify_password(request.current_password, current_user.password_hash):
|
|
raise HTTPException(status_code=400, detail="Current password is incorrect")
|
|
|
|
# Update password
|
|
current_user.password_hash = get_password_hash(request.new_password)
|
|
current_user.force_password_change = False # Clear flag if set
|
|
db.commit()
|
|
|
|
return {"message": "Password changed successfully"}
|
|
|
|
@api_router.get("/auth/me", response_model=UserResponse)
|
|
async def get_me(current_user: User = Depends(get_current_user), db: Session = Depends(get_db)):
|
|
# Get user's active subscription if exists
|
|
active_subscription = db.query(Subscription).filter(
|
|
Subscription.user_id == current_user.id,
|
|
Subscription.status == SubscriptionStatus.active
|
|
).first()
|
|
|
|
return UserResponse(
|
|
id=str(current_user.id),
|
|
email=current_user.email,
|
|
first_name=current_user.first_name,
|
|
last_name=current_user.last_name,
|
|
phone=current_user.phone,
|
|
address=current_user.address,
|
|
city=current_user.city,
|
|
state=current_user.state,
|
|
zipcode=current_user.zipcode,
|
|
date_of_birth=current_user.date_of_birth,
|
|
status=current_user.status.value,
|
|
role=get_user_role_code(current_user),
|
|
email_verified=current_user.email_verified,
|
|
created_at=current_user.created_at,
|
|
subscription_start_date=active_subscription.start_date if active_subscription else None,
|
|
subscription_end_date=active_subscription.end_date if active_subscription else None,
|
|
subscription_status=active_subscription.status.value if active_subscription else None
|
|
)
|
|
|
|
@api_router.get("/auth/permissions")
|
|
async def get_my_permissions(
|
|
current_user: User = Depends(get_current_user),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get current user's permissions based on their role
|
|
Returns list of permission codes (e.g., ['users.view', 'events.create'])
|
|
"""
|
|
permissions = await get_user_permissions(current_user, db)
|
|
return {
|
|
"permissions": permissions,
|
|
"role": get_user_role_code(current_user)
|
|
}
|
|
|
|
@api_router.get("/config")
|
|
async def get_config():
|
|
"""
|
|
Get public configuration values
|
|
Returns: max_file_size_bytes, max_file_size_mb
|
|
"""
|
|
max_file_size_bytes = int(os.getenv('MAX_FILE_SIZE_BYTES', 52428800)) # Default 50MB
|
|
max_file_size_mb = max_file_size_bytes / (1024 * 1024)
|
|
|
|
return {
|
|
"max_file_size_bytes": max_file_size_bytes,
|
|
"max_file_size_mb": int(max_file_size_mb)
|
|
}
|
|
|
|
@api_router.get("/diagnostics/cors")
|
|
async def cors_diagnostics(request: Request):
|
|
"""
|
|
CORS Diagnostics Endpoint
|
|
Shows current CORS configuration and request details for debugging
|
|
|
|
Use this to verify:
|
|
1. What origins are allowed
|
|
2. What origin is making the request
|
|
3. Whether CORS is properly configured
|
|
"""
|
|
cors_origins_env = os.environ.get('CORS_ORIGINS', '')
|
|
|
|
if cors_origins_env:
|
|
configured_origins = [origin.strip() for origin in cors_origins_env.split(',')]
|
|
cors_status = "✅ CONFIGURED"
|
|
else:
|
|
configured_origins = [
|
|
"http://localhost:3000",
|
|
"http://localhost:8000",
|
|
"http://127.0.0.1:3000",
|
|
"http://127.0.0.1:8000"
|
|
]
|
|
cors_status = "⚠️ NOT CONFIGURED (using defaults)"
|
|
|
|
request_origin = request.headers.get('origin', 'None')
|
|
origin_allowed = request_origin in configured_origins
|
|
|
|
return {
|
|
"cors_status": cors_status,
|
|
"environment": ENVIRONMENT,
|
|
"cors_origins_env_variable": cors_origins_env or "(not set)",
|
|
"allowed_origins": configured_origins,
|
|
"request_origin": request_origin,
|
|
"origin_allowed": origin_allowed,
|
|
"diagnosis": {
|
|
"cors_configured": bool(cors_origins_env),
|
|
"origin_matches": origin_allowed,
|
|
"issue": None if origin_allowed else f"Origin '{request_origin}' is not in allowed origins list"
|
|
},
|
|
"fix_instructions": None if origin_allowed else (
|
|
f"Add to backend .env file:\n"
|
|
f"CORS_ORIGINS={request_origin}"
|
|
f"{(',' + ','.join(configured_origins)) if cors_origins_env else ''}"
|
|
)
|
|
}
|
|
|
|
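# Illustrative configuration sketch (assumption, not part of the application): the endpoint
# above reads CORS_ORIGINS as a comma-separated list from the backend .env file, e.g.
#
#     CORS_ORIGINS=https://members.example.org,http://localhost:3000
#
# After changing it, restart the backend and call /api/diagnostics/cors again to confirm
# that "origin_allowed" is true for the frontend's origin.
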
# User Profile Routes
@api_router.get("/users/profile", response_model=UserResponse)
async def get_profile(current_user: User = Depends(get_current_user)):
    # Use from_attributes to automatically map all User fields to UserResponse
    return UserResponse.model_validate(current_user)

@api_router.put("/users/profile")
|
|
async def update_profile(
|
|
request: UpdateProfileRequest,
|
|
current_user: User = Depends(get_current_user),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Update user profile with basic info, partner details, newsletter prefs, volunteer interests, and directory settings."""
|
|
|
|
# Basic personal information
|
|
if request.first_name is not None:
|
|
current_user.first_name = request.first_name
|
|
if request.last_name is not None:
|
|
current_user.last_name = request.last_name
|
|
if request.phone is not None:
|
|
current_user.phone = request.phone
|
|
if request.address is not None:
|
|
current_user.address = request.address
|
|
if request.city is not None:
|
|
current_user.city = request.city
|
|
if request.state is not None:
|
|
current_user.state = request.state
|
|
if request.zipcode is not None:
|
|
current_user.zipcode = request.zipcode
|
|
|
|
# Partner information
|
|
if request.partner_first_name is not None:
|
|
current_user.partner_first_name = request.partner_first_name
|
|
if request.partner_last_name is not None:
|
|
current_user.partner_last_name = request.partner_last_name
|
|
if request.partner_is_member is not None:
|
|
current_user.partner_is_member = request.partner_is_member
|
|
if request.partner_plan_to_become_member is not None:
|
|
current_user.partner_plan_to_become_member = request.partner_plan_to_become_member
|
|
|
|
# Newsletter preferences
|
|
if request.newsletter_publish_name is not None:
|
|
current_user.newsletter_publish_name = request.newsletter_publish_name
|
|
if request.newsletter_publish_photo is not None:
|
|
current_user.newsletter_publish_photo = request.newsletter_publish_photo
|
|
if request.newsletter_publish_birthday is not None:
|
|
current_user.newsletter_publish_birthday = request.newsletter_publish_birthday
|
|
if request.newsletter_publish_none is not None:
|
|
current_user.newsletter_publish_none = request.newsletter_publish_none
|
|
|
|
# Volunteer interests (array)
|
|
if request.volunteer_interests is not None:
|
|
current_user.volunteer_interests = request.volunteer_interests
|
|
|
|
# Directory settings
|
|
if request.show_in_directory is not None:
|
|
current_user.show_in_directory = request.show_in_directory
|
|
if request.directory_email is not None:
|
|
current_user.directory_email = request.directory_email
|
|
if request.directory_bio is not None:
|
|
current_user.directory_bio = request.directory_bio
|
|
if request.directory_address is not None:
|
|
current_user.directory_address = request.directory_address
|
|
if request.directory_phone is not None:
|
|
current_user.directory_phone = request.directory_phone
|
|
if request.directory_dob is not None:
|
|
current_user.directory_dob = request.directory_dob
|
|
if request.directory_partner_name is not None:
|
|
current_user.directory_partner_name = request.directory_partner_name
|
|
|
|
current_user.updated_at = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
db.refresh(current_user)
|
|
|
|
return {"message": "Profile updated successfully"}
|
|
|
|
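# Illustrative request sketch (assumption, not part of the application): the update is
# partial: only fields that are present and non-null in the body are written, so a client
# can change a single preference without resending the whole profile. Values are made up.
#
#     PUT /api/users/profile
#     {"city": "Portland", "newsletter_publish_photo": true}
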
# ==================== MEMBERS ONLY ROUTES ====================

# Member Directory Routes
@api_router.get("/members/directory")
async def get_member_directory(
    search: Optional[str] = None,
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """
    Get list of all active users (members and staff) who opted into the directory.

    Includes members, admins, finance, and superadmins who have:
    - show_in_directory = True
    - status = active
    """
    query = db.query(User).filter(
        User.show_in_directory == True,
        User.status == UserStatus.active
    )

    # Optional search filter
    if search:
        search_term = f"%{search}%"
        query = query.filter(
            or_(
                User.first_name.ilike(search_term),
                User.last_name.ilike(search_term),
                User.directory_bio.ilike(search_term)
            )
        )

    directory_members = query.order_by(User.first_name, User.last_name).all()

    return [{
        "id": str(member.id),
        "first_name": member.first_name,
        "last_name": member.last_name,
        "role": member.role.value if member.role else None,
        "profile_photo_url": member.profile_photo_url,
        "directory_email": member.directory_email,
        "directory_bio": member.directory_bio,
        "directory_address": member.directory_address,
        "directory_phone": member.directory_phone,
        "directory_dob": member.directory_dob.isoformat() if member.directory_dob else None,
        "directory_partner_name": member.directory_partner_name,
        "volunteer_interests": member.volunteer_interests or [],
        "social_media_facebook": member.social_media_facebook,
        "social_media_instagram": member.social_media_instagram,
        "social_media_twitter": member.social_media_twitter,
        "social_media_linkedin": member.social_media_linkedin,
        "member_since": member.member_since.isoformat() if member.member_since else None,
        "created_at": member.created_at.isoformat() if member.created_at else None
    } for member in directory_members]

@api_router.get("/members/directory/{user_id}")
|
|
async def get_directory_member_profile(
|
|
user_id: str,
|
|
current_user: User = Depends(get_active_member),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Get public directory profile of a specific member or staff"""
|
|
member = db.query(User).filter(
|
|
User.id == user_id,
|
|
User.show_in_directory == True,
|
|
User.status == UserStatus.active
|
|
).first()
|
|
|
|
if not member:
|
|
raise HTTPException(status_code=404, detail="Member not found in directory")
|
|
|
|
return {
|
|
"id": str(member.id),
|
|
"first_name": member.first_name,
|
|
"last_name": member.last_name,
|
|
"role": member.role.value if member.role else None,
|
|
"profile_photo_url": member.profile_photo_url,
|
|
"directory_email": member.directory_email,
|
|
"directory_bio": member.directory_bio,
|
|
"directory_address": member.directory_address,
|
|
"directory_phone": member.directory_phone,
|
|
"directory_dob": member.directory_dob.isoformat() if member.directory_dob else None,
|
|
"directory_partner_name": member.directory_partner_name,
|
|
"volunteer_interests": member.volunteer_interests or [],
|
|
"social_media_facebook": member.social_media_facebook,
|
|
"social_media_instagram": member.social_media_instagram,
|
|
"social_media_twitter": member.social_media_twitter,
|
|
"social_media_linkedin": member.social_media_linkedin,
|
|
"member_since": member.member_since.isoformat() if member.member_since else None,
|
|
"created_at": member.created_at.isoformat() if member.created_at else None
|
|
}
|
|
|
|
# Enhanced Profile Routes (Active Members Only)
@api_router.get("/members/profile")
async def get_enhanced_profile(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """Get enhanced profile with all member-only fields"""
    return {
        "id": str(current_user.id),
        "email": current_user.email,
        "first_name": current_user.first_name,
        "last_name": current_user.last_name,
        "phone": current_user.phone,
        "address": current_user.address,
        "city": current_user.city,
        "state": current_user.state,
        "zipcode": current_user.zipcode,
        "date_of_birth": current_user.date_of_birth,
        "profile_photo_url": current_user.profile_photo_url,
        "social_media_facebook": current_user.social_media_facebook,
        "social_media_instagram": current_user.social_media_instagram,
        "social_media_twitter": current_user.social_media_twitter,
        "social_media_linkedin": current_user.social_media_linkedin,
        "show_in_directory": current_user.show_in_directory,
        "directory_email": current_user.directory_email,
        "directory_bio": current_user.directory_bio,
        "directory_address": current_user.directory_address,
        "directory_phone": current_user.directory_phone,
        "directory_dob": current_user.directory_dob,
        "directory_partner_name": current_user.directory_partner_name,
        "status": current_user.status.value,
        "role": get_user_role_code(current_user)
    }

@api_router.put("/members/profile")
|
|
async def update_enhanced_profile(
|
|
request: EnhancedProfileUpdateRequest,
|
|
current_user: User = Depends(get_active_member),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Update enhanced profile with social media and directory settings"""
|
|
if request.social_media_facebook is not None:
|
|
current_user.social_media_facebook = request.social_media_facebook
|
|
if request.social_media_instagram is not None:
|
|
current_user.social_media_instagram = request.social_media_instagram
|
|
if request.social_media_twitter is not None:
|
|
current_user.social_media_twitter = request.social_media_twitter
|
|
if request.social_media_linkedin is not None:
|
|
current_user.social_media_linkedin = request.social_media_linkedin
|
|
if request.show_in_directory is not None:
|
|
current_user.show_in_directory = request.show_in_directory
|
|
if request.directory_email is not None:
|
|
current_user.directory_email = request.directory_email
|
|
if request.directory_bio is not None:
|
|
current_user.directory_bio = request.directory_bio
|
|
if request.directory_address is not None:
|
|
current_user.directory_address = request.directory_address
|
|
if request.directory_phone is not None:
|
|
current_user.directory_phone = request.directory_phone
|
|
if request.directory_dob is not None:
|
|
current_user.directory_dob = request.directory_dob
|
|
if request.directory_partner_name is not None:
|
|
current_user.directory_partner_name = request.directory_partner_name
|
|
|
|
current_user.updated_at = datetime.now(timezone.utc)
|
|
db.commit()
|
|
db.refresh(current_user)
|
|
|
|
return {"message": "Enhanced profile updated successfully"}
|
|
|
|
@api_router.post("/members/profile/upload-photo")
|
|
async def upload_profile_photo(
|
|
file: UploadFile = File(...),
|
|
current_user: User = Depends(get_active_member),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Upload profile photo to Cloudflare R2"""
|
|
r2 = get_r2_storage()
|
|
|
|
# Get storage quota
|
|
storage = db.query(StorageUsage).first()
|
|
if not storage:
|
|
# Initialize storage tracking
|
|
storage = StorageUsage(
|
|
total_bytes_used=0,
|
|
max_bytes_allowed=int(os.getenv('MAX_STORAGE_BYTES', 10737418240))
|
|
)
|
|
db.add(storage)
|
|
db.commit()
|
|
db.refresh(storage)
|
|
|
|
# Get max file size from env
|
|
max_file_size = int(os.getenv('MAX_FILE_SIZE_BYTES', 52428800))
|
|
|
|
# Delete old profile photo if exists
|
|
if current_user.profile_photo_url:
|
|
# Extract object key from URL
|
|
old_key = current_user.profile_photo_url.split('/')[-1]
|
|
old_key = f"profiles/{old_key}"
|
|
try:
|
|
old_size = await r2.get_file_size(old_key)
|
|
await r2.delete_file(old_key)
|
|
# Update storage usage
|
|
storage.total_bytes_used -= old_size
|
|
except:
|
|
pass # File might not exist
|
|
|
|
# Upload new photo
|
|
try:
|
|
public_url, object_key, file_size = await r2.upload_file(
|
|
file=file,
|
|
folder="profiles",
|
|
allowed_types=r2.ALLOWED_IMAGE_TYPES,
|
|
max_size_bytes=max_file_size
|
|
)
|
|
|
|
# Check storage quota
|
|
if storage.total_bytes_used + file_size > storage.max_bytes_allowed:
|
|
# Rollback upload
|
|
await r2.delete_file(object_key)
|
|
raise HTTPException(
|
|
status_code=507,
|
|
detail=f"Storage limit exceeded. Used: {storage.total_bytes_used / (1024**3):.2f}GB, Limit: {storage.max_bytes_allowed / (1024**3):.2f}GB"
|
|
)
|
|
|
|
# Update user profile
|
|
current_user.profile_photo_url = public_url
|
|
current_user.updated_at = datetime.now(timezone.utc)
|
|
|
|
# Update storage usage
|
|
storage.total_bytes_used += file_size
|
|
storage.last_updated = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
db.refresh(current_user)
|
|
|
|
logger.info(f"Profile photo uploaded for user {current_user.email}: {file_size} bytes")
|
|
|
|
return {
|
|
"message": "Profile photo uploaded successfully",
|
|
"profile_photo_url": public_url
|
|
}
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
logger.error(f"Error uploading profile photo: {str(e)}")
|
|
raise HTTPException(status_code=500, detail=f"Upload failed: {str(e)}")
|
|
|
|
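# Illustrative client sketch (assumption, not part of the application): the endpoint expects
# a multipart upload under the "file" field. The path, filename, and token are placeholders.
#
#     import httpx
#     with open("avatar.jpg", "rb") as fh:
#         httpx.post(
#             "http://localhost:8000/membership/api/members/profile/upload-photo",
#             headers={"Authorization": "Bearer <token>"},
#             files={"file": ("avatar.jpg", fh, "image/jpeg")},
#         )
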
@api_router.delete("/members/profile/delete-photo")
|
|
async def delete_profile_photo(
|
|
current_user: User = Depends(get_active_member),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Delete profile photo from R2 and profile"""
|
|
if not current_user.profile_photo_url:
|
|
raise HTTPException(status_code=404, detail="No profile photo to delete")
|
|
|
|
r2 = get_r2_storage()
|
|
storage = db.query(StorageUsage).first()
|
|
|
|
# Extract object key from URL
|
|
object_key = current_user.profile_photo_url.split('/')[-1]
|
|
object_key = f"profiles/{object_key}"
|
|
|
|
try:
|
|
file_size = await r2.get_file_size(object_key)
|
|
await r2.delete_file(object_key)
|
|
|
|
# Update storage usage
|
|
if storage:
|
|
storage.total_bytes_used -= file_size
|
|
storage.last_updated = datetime.now(timezone.utc)
|
|
|
|
# Update user profile
|
|
current_user.profile_photo_url = None
|
|
current_user.updated_at = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
|
|
logger.info(f"Profile photo deleted for user {current_user.email}")
|
|
|
|
return {"message": "Profile photo deleted successfully"}
|
|
except Exception as e:
|
|
logger.error(f"Error deleting profile photo: {str(e)}")
|
|
raise HTTPException(status_code=500, detail=f"Deletion failed: {str(e)}")
|
|
|
|
# Calendar Routes (Active Members Only)
@api_router.get("/members/calendar/events", response_model=List[CalendarEventResponse])
async def get_calendar_events(
    start_date: Optional[datetime] = None,
    end_date: Optional[datetime] = None,
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """Get calendar events with user RSVP status"""
    query = db.query(Event).filter(Event.published == True)

    if start_date:
        query = query.filter(Event.start_at >= start_date)
    if end_date:
        query = query.filter(Event.end_at <= end_date)

    events = query.order_by(Event.start_at).all()

    result = []
    for event in events:
        # Get user's RSVP status for this event
        rsvp = db.query(EventRSVP).filter(
            EventRSVP.event_id == event.id,
            EventRSVP.user_id == current_user.id
        ).first()

        user_rsvp_status = rsvp.rsvp_status.value if rsvp else None

        result.append(CalendarEventResponse(
            id=str(event.id),
            title=event.title,
            description=event.description,
            start_at=event.start_at,
            end_at=event.end_at,
            location=event.location,
            capacity=event.capacity,
            user_rsvp_status=user_rsvp_status,
            microsoft_calendar_synced=event.microsoft_calendar_sync_enabled
        ))

    return result

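# Illustrative request sketch (assumption, not part of the application): start_date and
# end_date are optional ISO-8601 query parameters, so a month view can be fetched with:
#
#     GET /api/members/calendar/events?start_date=2025-03-01T00:00:00Z&end_date=2025-03-31T23:59:59Z
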
# Admin Calendar Sync Routes
@api_router.post("/admin/calendar/sync/{event_id}")
async def sync_event_to_microsoft(
    event_id: str,
    current_user: User = Depends(require_permission("events.edit")),
    db: Session = Depends(get_db)
):
    """Sync event to Microsoft Calendar"""
    event = db.query(Event).filter(Event.id == event_id).first()
    if not event:
        raise HTTPException(status_code=404, detail="Event not found")

    ms_calendar = get_ms_calendar_service()

    try:
        # Sync event
        ms_event_id = await ms_calendar.sync_event(
            loaf_event=event,
            existing_ms_event_id=event.microsoft_calendar_id
        )

        # Update event with MS Calendar ID
        event.microsoft_calendar_id = ms_event_id
        event.microsoft_calendar_sync_enabled = True
        event.updated_at = datetime.now(timezone.utc)

        db.commit()

        logger.info(f"Event {event.title} synced to Microsoft Calendar by {current_user.email}")

        return {
            "message": "Event synced to Microsoft Calendar successfully",
            "microsoft_calendar_id": ms_event_id
        }
    except Exception as e:
        logger.error(f"Error syncing event to Microsoft Calendar: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Sync failed: {str(e)}")

@api_router.delete("/admin/calendar/unsync/{event_id}")
|
|
async def unsync_event_from_microsoft(
|
|
event_id: str,
|
|
current_user: User = Depends(require_permission("events.edit")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Remove event from Microsoft Calendar"""
|
|
event = db.query(Event).filter(Event.id == event_id).first()
|
|
if not event:
|
|
raise HTTPException(status_code=404, detail="Event not found")
|
|
|
|
if not event.microsoft_calendar_id:
|
|
raise HTTPException(status_code=400, detail="Event is not synced to Microsoft Calendar")
|
|
|
|
ms_calendar = get_ms_calendar_service()
|
|
|
|
try:
|
|
# Delete from Microsoft Calendar
|
|
await ms_calendar.delete_event(event.microsoft_calendar_id)
|
|
|
|
# Update event
|
|
event.microsoft_calendar_id = None
|
|
event.microsoft_calendar_sync_enabled = False
|
|
event.updated_at = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
|
|
logger.info(f"Event {event.title} unsynced from Microsoft Calendar by {current_user.email}")
|
|
|
|
return {"message": "Event removed from Microsoft Calendar successfully"}
|
|
except Exception as e:
|
|
logger.error(f"Error removing event from Microsoft Calendar: {str(e)}")
|
|
raise HTTPException(status_code=500, detail=f"Unsync failed: {str(e)}")
|
|
|
|
# Event Gallery Routes (Members Only)
@api_router.get("/members/gallery")
async def get_events_with_galleries(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """Get all events that have gallery images"""
    # Get events that have at least one gallery image
    events_with_galleries = db.query(Event).join(EventGallery).filter(
        Event.published == True
    ).distinct().order_by(Event.start_at.desc()).all()

    result = []
    for event in events_with_galleries:
        gallery_count = db.query(EventGallery).filter(
            EventGallery.event_id == event.id
        ).count()

        # Get first image as thumbnail
        first_image = db.query(EventGallery).filter(
            EventGallery.event_id == event.id
        ).order_by(EventGallery.created_at).first()

        result.append({
            "id": str(event.id),
            "title": event.title,
            "description": event.description,
            "start_at": event.start_at,
            "location": event.location,
            "gallery_count": gallery_count,
            "thumbnail_url": first_image.image_url if first_image else None
        })

    return result

@api_router.get("/events/{event_id}/gallery")
|
|
async def get_event_gallery(
|
|
event_id: str,
|
|
current_user: User = Depends(get_active_member),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Get all gallery images for a specific event"""
|
|
event = db.query(Event).filter(Event.id == event_id).first()
|
|
if not event:
|
|
raise HTTPException(status_code=404, detail="Event not found")
|
|
|
|
gallery_images = db.query(EventGallery).filter(
|
|
EventGallery.event_id == event_id
|
|
).order_by(EventGallery.created_at.desc()).all()
|
|
|
|
return [
|
|
{
|
|
"id": str(img.id),
|
|
"image_url": img.image_url,
|
|
"image_key": img.image_key,
|
|
"caption": img.caption,
|
|
"uploaded_by": str(img.uploaded_by),
|
|
"file_size_bytes": img.file_size_bytes,
|
|
"created_at": img.created_at
|
|
}
|
|
for img in gallery_images
|
|
]
|
|
|
|
# Admin Event Gallery Routes
@api_router.post("/admin/events/{event_id}/gallery")
async def upload_event_gallery_image(
    event_id: str,
    file: UploadFile = File(...),
    caption: Optional[str] = None,
    current_user: User = Depends(require_permission("gallery.upload")),
    db: Session = Depends(get_db)
):
    """Upload image to event gallery (Admin only)"""
    # Validate event exists
    event = db.query(Event).filter(Event.id == event_id).first()
    if not event:
        raise HTTPException(status_code=404, detail="Event not found")

    r2 = get_r2_storage()

    # Get storage quota
    storage = db.query(StorageUsage).first()
    if not storage:
        storage = StorageUsage(
            total_bytes_used=0,
            max_bytes_allowed=int(os.getenv('MAX_STORAGE_BYTES', 10737418240))
        )
        db.add(storage)
        db.commit()
        db.refresh(storage)

    # Get max file size from env
    max_file_size = int(os.getenv('MAX_FILE_SIZE_BYTES', 52428800))

    try:
        # Upload to R2
        public_url, object_key, file_size = await r2.upload_file(
            file=file,
            folder=f"gallery/{event_id}",
            allowed_types=r2.ALLOWED_IMAGE_TYPES,
            max_size_bytes=max_file_size
        )

        # Check storage quota
        if storage.total_bytes_used + file_size > storage.max_bytes_allowed:
            # Rollback upload
            await r2.delete_file(object_key)
            raise HTTPException(
                status_code=507,
                detail=f"Storage limit exceeded. Used: {storage.total_bytes_used / (1024**3):.2f}GB, Limit: {storage.max_bytes_allowed / (1024**3):.2f}GB"
            )

        # Create gallery record
        gallery_image = EventGallery(
            event_id=event.id,
            image_url=public_url,
            image_key=object_key,
            caption=caption,
            uploaded_by=current_user.id,
            file_size_bytes=file_size
        )
        db.add(gallery_image)

        # Update storage usage
        storage.total_bytes_used += file_size
        storage.last_updated = datetime.now(timezone.utc)

        db.commit()
        db.refresh(gallery_image)

        logger.info(f"Gallery image uploaded for event {event.title} by {current_user.email}: {file_size} bytes")

        return {
            "message": "Image uploaded successfully",
            "image": {
                "id": str(gallery_image.id),
                "image_url": gallery_image.image_url,
                "caption": gallery_image.caption,
                "file_size_bytes": gallery_image.file_size_bytes
            }
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error uploading gallery image: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Upload failed: {str(e)}")

@api_router.delete("/admin/event-gallery/{image_id}")
|
|
async def delete_gallery_image(
|
|
image_id: str,
|
|
current_user: User = Depends(require_permission("gallery.delete")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Delete image from event gallery (Admin only)"""
|
|
gallery_image = db.query(EventGallery).filter(EventGallery.id == image_id).first()
|
|
if not gallery_image:
|
|
raise HTTPException(status_code=404, detail="Gallery image not found")
|
|
|
|
r2 = get_r2_storage()
|
|
storage = db.query(StorageUsage).first()
|
|
|
|
try:
|
|
# Delete from R2
|
|
await r2.delete_file(gallery_image.image_key)
|
|
|
|
# Update storage usage
|
|
if storage:
|
|
storage.total_bytes_used -= gallery_image.file_size_bytes
|
|
storage.last_updated = datetime.now(timezone.utc)
|
|
|
|
# Delete from database
|
|
db.delete(gallery_image)
|
|
db.commit()
|
|
|
|
logger.info(f"Gallery image deleted by {current_user.email}: {gallery_image.image_key}")
|
|
|
|
return {"message": "Image deleted successfully"}
|
|
except Exception as e:
|
|
logger.error(f"Error deleting gallery image: {str(e)}")
|
|
raise HTTPException(status_code=500, detail=f"Deletion failed: {str(e)}")
|
|
|
|
@api_router.put("/admin/event-gallery/{image_id}")
|
|
async def update_gallery_image_caption(
|
|
image_id: str,
|
|
caption: str,
|
|
current_user: User = Depends(require_permission("gallery.edit")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Update gallery image caption (Admin only)"""
|
|
gallery_image = db.query(EventGallery).filter(EventGallery.id == image_id).first()
|
|
if not gallery_image:
|
|
raise HTTPException(status_code=404, detail="Gallery image not found")
|
|
|
|
gallery_image.caption = caption
|
|
db.commit()
|
|
db.refresh(gallery_image)
|
|
|
|
return {
|
|
"message": "Caption updated successfully",
|
|
"image": {
|
|
"id": str(gallery_image.id),
|
|
"caption": gallery_image.caption
|
|
}
|
|
}
|
|
|
|
# Event Routes
@api_router.get("/events", response_model=List[EventResponse])
async def get_events(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    # Get published events for all users
    events = db.query(Event).filter(Event.published == True).order_by(Event.start_at).all()

    result = []
    for event in events:
        rsvp_count = db.query(EventRSVP).filter(
            EventRSVP.event_id == event.id,
            EventRSVP.rsvp_status == RSVPStatus.yes
        ).count()

        # Get current user's RSVP status for this event
        user_rsvp = db.query(EventRSVP).filter(
            EventRSVP.event_id == event.id,
            EventRSVP.user_id == current_user.id
        ).first()

        user_rsvp_status = user_rsvp.rsvp_status.value if user_rsvp else None

        result.append(EventResponse(
            id=str(event.id),
            title=event.title,
            description=event.description,
            start_at=event.start_at,
            end_at=event.end_at,
            location=event.location,
            capacity=event.capacity,
            published=event.published,
            created_by=str(event.created_by),
            created_at=event.created_at,
            rsvp_count=rsvp_count,
            user_rsvp_status=user_rsvp_status
        ))

    return result

@api_router.get("/events/{event_id}", response_model=EventResponse)
async def get_event(
    event_id: str,
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    event = db.query(Event).filter(Event.id == event_id).first()
    if not event:
        raise HTTPException(status_code=404, detail="Event not found")

    rsvp_count = db.query(EventRSVP).filter(
        EventRSVP.event_id == event.id,
        EventRSVP.rsvp_status == RSVPStatus.yes
    ).count()

    # Get current user's RSVP status for this event
    user_rsvp = db.query(EventRSVP).filter(
        EventRSVP.event_id == event_id,
        EventRSVP.user_id == current_user.id
    ).first()

    user_rsvp_status = user_rsvp.rsvp_status.value if user_rsvp else None

    return EventResponse(
        id=str(event.id),
        title=event.title,
        description=event.description,
        start_at=event.start_at,
        end_at=event.end_at,
        location=event.location,
        capacity=event.capacity,
        published=event.published,
        created_by=str(event.created_by),
        created_at=event.created_at,
        rsvp_count=rsvp_count,
        user_rsvp_status=user_rsvp_status
    )

@api_router.post("/events/{event_id}/rsvp")
async def rsvp_to_event(
    event_id: str,
    request: RSVPRequest,
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    event = db.query(Event).filter(Event.id == event_id).first()
    if not event:
        raise HTTPException(status_code=404, detail="Event not found")

    # Check if RSVP already exists
    existing_rsvp = db.query(EventRSVP).filter(
        EventRSVP.event_id == event_id,
        EventRSVP.user_id == current_user.id
    ).first()

    if existing_rsvp:
        existing_rsvp.rsvp_status = RSVPStatus(request.rsvp_status)
        existing_rsvp.updated_at = datetime.now(timezone.utc)
    else:
        rsvp = EventRSVP(
            event_id=event.id,
            user_id=current_user.id,
            rsvp_status=RSVPStatus(request.rsvp_status)
        )
        db.add(rsvp)

    db.commit()

    return {"message": "RSVP updated successfully"}

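# Illustrative request body for the RSVP endpoint above, assuming the RSVPRequest
# model (defined earlier in this module) exposes an `rsvp_status` string field:
#
#   POST /api/events/{event_id}/rsvp
#   {"rsvp_status": "yes"}
#
# The value is coerced with RSVPStatus(request.rsvp_status), so it must match one
# of the RSVPStatus enum values (e.g. "yes").
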
@api_router.get("/members/event-activity")
async def get_my_event_activity(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """
    Get current user's event activity including upcoming RSVPs and attendance history
    """
    # Get all user's RSVPs
    rsvps = db.query(EventRSVP).filter(
        EventRSVP.user_id == current_user.id
    ).order_by(EventRSVP.created_at.desc()).all()

    # Categorize events
    upcoming_events = []
    past_events = []
    now = datetime.now(timezone.utc)

    for rsvp in rsvps:
        event = db.query(Event).filter(Event.id == rsvp.event_id).first()
        if not event:
            continue

        event_data = {
            "id": str(event.id),
            "title": event.title,
            "description": event.description,
            "location": event.location,
            "start_at": event.start_at.isoformat(),
            "end_at": event.end_at.isoformat(),
            "rsvp_status": rsvp.rsvp_status.value,
            "attended": rsvp.attended,
            "attended_at": rsvp.attended_at.isoformat() if rsvp.attended_at else None
        }

        # Separate upcoming vs past events
        # Ensure timezone-aware comparison
        event_end_at = event.end_at.replace(tzinfo=timezone.utc) if event.end_at.tzinfo is None else event.end_at
        if event_end_at > now:
            upcoming_events.append(event_data)
        else:
            past_events.append(event_data)

    return {
        "upcoming_events": sorted(upcoming_events, key=lambda x: x["start_at"]),
        "past_events": sorted(past_events, key=lambda x: x["start_at"], reverse=True),
        "total_attended": sum(1 for rsvp in rsvps if rsvp.attended),
        "total_rsvps": len(rsvps)
    }

# ============================================================================
# Member Transaction History Endpoint
# ============================================================================
@api_router.get("/members/transactions")
async def get_member_transactions(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """
    Get current member's transaction history including subscriptions and donations.
    Returns both types of transactions sorted by date (newest first).
    """
    # Get user's subscriptions with plan details
    subscriptions = db.query(Subscription).filter(
        Subscription.user_id == current_user.id
    ).order_by(Subscription.created_at.desc()).all()

    subscription_list = []
    for sub in subscriptions:
        plan = db.query(SubscriptionPlan).filter(SubscriptionPlan.id == sub.plan_id).first()
        subscription_list.append({
            "id": str(sub.id),
            "type": "subscription",
            "description": plan.name if plan else "Subscription",
            "amount_cents": sub.amount_paid_cents or ((sub.base_subscription_cents or 0) + (sub.donation_cents or 0)),
            "base_amount_cents": sub.base_subscription_cents,
            "donation_cents": sub.donation_cents,
            "status": sub.status.value if sub.status else "unknown",
            "payment_method": sub.payment_method,
            "card_brand": sub.card_brand,
            "card_last4": sub.card_last4,
            "stripe_receipt_url": sub.stripe_receipt_url,
            "created_at": sub.created_at.isoformat() if sub.created_at else None,
            "payment_completed_at": sub.payment_completed_at.isoformat() if sub.payment_completed_at else None,
            "start_date": sub.start_date.isoformat() if sub.start_date else None,
            "end_date": sub.end_date.isoformat() if sub.end_date else None,
            "billing_cycle": plan.billing_cycle if plan else None,
            "manual_payment": sub.manual_payment
        })

    # Get user's donations
    donations = db.query(Donation).filter(
        Donation.user_id == current_user.id
    ).order_by(Donation.created_at.desc()).all()

    donation_list = []
    for don in donations:
        donation_list.append({
            "id": str(don.id),
            "type": "donation",
            "description": "Donation",
            "amount_cents": don.amount_cents,
            "status": don.status.value if don.status else "unknown",
            "payment_method": don.payment_method,
            "card_brand": don.card_brand,
            "card_last4": don.card_last4,
            "stripe_receipt_url": don.stripe_receipt_url,
            "created_at": don.created_at.isoformat() if don.created_at else None,
            "payment_completed_at": don.payment_completed_at.isoformat() if don.payment_completed_at else None,
            "notes": don.notes
        })

    return {
        "subscriptions": subscription_list,
        "donations": donation_list,
        "total_subscription_amount_cents": sum(s["amount_cents"] or 0 for s in subscription_list),
        "total_donation_amount_cents": sum(d["amount_cents"] or 0 for d in donation_list)
    }

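# Note: all monetary fields in the transaction payloads above are integer cents.
# A client would typically format them for display, e.g. (illustrative only):
#
#   f"${txn['amount_cents'] / 100:.2f}"
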
# ============================================================================
# Calendar Export Endpoints (Universal iCalendar .ics format)
# ============================================================================

@api_router.get("/events/{event_id}/download.ics")
async def download_event_ics(
    event_id: str,
    db: Session = Depends(get_db)
):
    """
    Download single event as .ics file (RFC 5545 iCalendar format)
    No authentication required for published events
    Works with Google Calendar, Apple Calendar, Microsoft Outlook, etc.
    """
    event = db.query(Event).filter(
        Event.id == event_id,
        Event.published == True
    ).first()

    if not event:
        raise HTTPException(status_code=404, detail="Event not found")

    # Generate UID if not exists
    if not event.calendar_uid:
        event.calendar_uid = calendar_service.generate_event_uid()
        db.commit()

    ics_content = calendar_service.create_single_event_calendar(event)

    # Sanitize filename
    safe_filename = "".join(c for c in event.title if c.isalnum() or c in (' ', '-', '_')).rstrip()
    safe_filename = safe_filename.replace(' ', '_') or 'event'

    return StreamingResponse(
        iter([ics_content]),
        media_type="text/calendar",
        headers={
            "Content-Disposition": f"attachment; filename={safe_filename}.ics",
            "Cache-Control": "public, max-age=300"  # Cache for 5 minutes
        }
    )

@api_router.get("/calendars/subscribe.ics")
async def subscribe_calendar(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """
    Subscribe to user's RSVP'd events (live calendar feed)
    Auto-syncs events marked as "Yes" RSVP
    Use webcal:// protocol for auto-sync in calendar apps
    """
    # Get all upcoming events user RSVP'd "yes" to
    rsvps = db.query(EventRSVP).filter(
        EventRSVP.user_id == current_user.id,
        EventRSVP.rsvp_status == RSVPStatus.yes
    ).join(Event).filter(
        Event.start_at > datetime.now(timezone.utc),
        Event.published == True
    ).all()

    events = [rsvp.event for rsvp in rsvps]

    # Generate UIDs for events that don't have them
    for event in events:
        if not event.calendar_uid:
            event.calendar_uid = calendar_service.generate_event_uid()
            db.commit()

    feed_name = f"{current_user.first_name}'s LOAF Events"
    ics_content = calendar_service.create_subscription_feed(events, feed_name)

    return StreamingResponse(
        iter([ics_content]),
        media_type="text/calendar",
        headers={
            "Content-Disposition": "inline; filename=loaf-events.ics",
            "Cache-Control": "public, max-age=3600",  # Cache for 1 hour
            "ETag": f'"{hash(ics_content)}"'
        }
    )

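# Illustrative subscription URL for the feed above (host and root path depend on
# the deployment): calendar apps that support live feeds can point at
#
#   webcal://<your-host>/<root-path>/api/calendars/subscribe.ics
#
# which most clients treat as an auto-refreshing fetch of this endpoint.
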
@api_router.get("/calendars/all-events.ics")
async def download_all_events(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """
    Download all upcoming published events as .ics file (one-time download)
    Useful for importing all events at once
    """
    events = db.query(Event).filter(
        Event.published == True,
        Event.start_at > datetime.now(timezone.utc)
    ).order_by(Event.start_at).all()

    # Generate UIDs
    for event in events:
        if not event.calendar_uid:
            event.calendar_uid = calendar_service.generate_event_uid()
            db.commit()

    ics_content = calendar_service.create_subscription_feed(events, "All LOAF Events")

    return StreamingResponse(
        iter([ics_content]),
        media_type="text/calendar",
        headers={
            "Content-Disposition": "attachment; filename=loaf-all-events.ics",
            "Cache-Control": "public, max-age=600"  # Cache for 10 minutes
        }
    )

# ============================================================================
# Newsletter Archive Routes (Members Only)
# ============================================================================
@api_router.get("/newsletters")
async def get_newsletters(
    year: Optional[int] = None,
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """
    Get all newsletters, optionally filtered by year
    Members only
    """
    from models import NewsletterArchive
    from sqlalchemy import func  # extract() lives on SQLAlchemy core, not on the Session

    query = db.query(NewsletterArchive)

    if year:
        query = query.filter(
            func.extract('year', NewsletterArchive.published_date) == year
        )

    newsletters = query.order_by(NewsletterArchive.published_date.desc()).all()

    return [{
        "id": str(n.id),
        "title": n.title,
        "description": n.description,
        "published_date": n.published_date.isoformat(),
        "document_url": n.document_url,
        "document_type": n.document_type,
        "file_size_bytes": n.file_size_bytes,
        "created_at": n.created_at.isoformat()
    } for n in newsletters]

@api_router.get("/newsletters/years")
async def get_newsletter_years(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """
    Get list of years that have newsletters
    Members only
    """
    from models import NewsletterArchive
    from sqlalchemy import func, text  # extract()/text() come from SQLAlchemy core

    years = db.query(
        func.extract('year', NewsletterArchive.published_date).label('year')
    ).distinct().order_by(text('year DESC')).all()

    return [int(y.year) for y in years]

# ============================================================================
# Financial Reports Routes (Members Only)
# ============================================================================
@api_router.get("/financials")
async def get_financial_reports(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """
    Get all financial reports sorted by year (newest first)
    Members only
    """
    from models import FinancialReport

    reports = db.query(FinancialReport).order_by(
        FinancialReport.year.desc()
    ).all()

    return [{
        "id": str(r.id),
        "year": r.year,
        "title": r.title,
        "document_url": r.document_url,
        "document_type": r.document_type,
        "file_size_bytes": r.file_size_bytes,
        "created_at": r.created_at.isoformat()
    } for r in reports]

# ============================================================================
# Bylaws Routes (Members Only)
# ============================================================================
@api_router.get("/bylaws/current")
async def get_current_bylaws(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """
    Get current bylaws document
    Members only
    """
    from models import BylawsDocument

    bylaws = db.query(BylawsDocument).filter(
        BylawsDocument.is_current == True
    ).first()

    if not bylaws:
        raise HTTPException(status_code=404, detail="No current bylaws found")

    return {
        "id": str(bylaws.id),
        "title": bylaws.title,
        "version": bylaws.version,
        "effective_date": bylaws.effective_date.isoformat(),
        "document_url": bylaws.document_url,
        "document_type": bylaws.document_type,
        "file_size_bytes": bylaws.file_size_bytes,
        "is_current": bylaws.is_current,
        "created_at": bylaws.created_at.isoformat()
    }

@api_router.get("/bylaws/history")
async def get_bylaws_history(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """
    Get all bylaws versions (historical)
    Members only
    """
    from models import BylawsDocument

    history = db.query(BylawsDocument).order_by(
        BylawsDocument.effective_date.desc()
    ).all()

    return [{
        "id": str(b.id),
        "title": b.title,
        "version": b.version,
        "effective_date": b.effective_date.isoformat(),
        "document_url": b.document_url,
        "document_type": b.document_type,
        "file_size_bytes": b.file_size_bytes,
        "is_current": b.is_current,
        "created_at": b.created_at.isoformat()
    } for b in history]

# ============================================================================
# Configuration Endpoints
# ============================================================================
@api_router.get("/config/limits")
async def get_config_limits():
    """Get configuration limits for file uploads"""
    return {
        "max_file_size_bytes": int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880)),
        "max_storage_bytes": int(os.getenv('MAX_STORAGE_BYTES', 1073741824))
    }

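# Illustrative client-side use of /config/limits (not part of this server code):
# a front end could fetch these values before attempting an upload, e.g.
#
#   limits = await fetch_json(f"{api_base}/config/limits")   # hypothetical helper
#   if selected_file.size > limits["max_file_size_bytes"]:
#       show_error("File exceeds the configured upload limit")
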
# ============================================================================
# Registration Form Schema Routes
# ============================================================================

# Default registration schema matching current 4-step form
DEFAULT_REGISTRATION_SCHEMA = {
    "version": "1.0",
    "steps": [
        {
            "id": "step_personal",
            "title": "Personal Information",
            "description": "Please provide your personal details and tell us how you heard about us.",
            "order": 1,
            "sections": [
                {
                    "id": "section_personal_info",
                    "title": "Personal Information",
                    "order": 1,
                    "fields": [
                        {"id": "first_name", "type": "text", "label": "First Name", "required": True, "is_fixed": True, "mapping": "first_name", "validation": {"minLength": 1, "maxLength": 100}, "width": "half", "order": 1},
                        {"id": "last_name", "type": "text", "label": "Last Name", "required": True, "is_fixed": True, "mapping": "last_name", "validation": {"minLength": 1, "maxLength": 100}, "width": "half", "order": 2},
                        {"id": "phone", "type": "phone", "label": "Phone", "required": True, "is_fixed": False, "mapping": "phone", "width": "half", "order": 3},
                        {"id": "date_of_birth", "type": "date", "label": "Date of Birth", "required": True, "is_fixed": False, "mapping": "date_of_birth", "width": "half", "order": 4},
                        {"id": "address", "type": "text", "label": "Address", "required": True, "is_fixed": False, "mapping": "address", "width": "full", "order": 5},
                        {"id": "city", "type": "text", "label": "City", "required": True, "is_fixed": False, "mapping": "city", "width": "third", "order": 6},
                        {"id": "state", "type": "text", "label": "State", "required": True, "is_fixed": False, "mapping": "state", "width": "third", "order": 7},
                        {"id": "zipcode", "type": "text", "label": "Zipcode", "required": True, "is_fixed": False, "mapping": "zipcode", "width": "third", "order": 8}
                    ]
                },
                {
                    "id": "section_lead_sources",
                    "title": "How Did You Hear About Us?",
                    "order": 2,
                    "fields": [
                        {"id": "lead_sources", "type": "multiselect", "label": "How did you hear about us?", "required": True, "is_fixed": False, "mapping": "lead_sources", "width": "full", "order": 1, "options": [
                            {"value": "Current member", "label": "Current member"},
                            {"value": "Friend", "label": "Friend"},
                            {"value": "OutSmart Magazine", "label": "OutSmart Magazine"},
                            {"value": "Search engine (Google etc.)", "label": "Search engine (Google etc.)"},
                            {"value": "I've known about LOAF for a long time", "label": "I've known about LOAF for a long time"},
                            {"value": "Other", "label": "Other"}
                        ]}
                    ]
                },
                {
                    "id": "section_partner",
                    "title": "Partner Information (Optional)",
                    "order": 3,
                    "fields": [
                        {"id": "partner_first_name", "type": "text", "label": "Partner First Name", "required": False, "is_fixed": False, "mapping": "partner_first_name", "width": "half", "order": 1},
                        {"id": "partner_last_name", "type": "text", "label": "Partner Last Name", "required": False, "is_fixed": False, "mapping": "partner_last_name", "width": "half", "order": 2},
                        {"id": "partner_is_member", "type": "checkbox", "label": "Is your partner already a member?", "required": False, "is_fixed": False, "mapping": "partner_is_member", "width": "full", "order": 3},
                        {"id": "partner_plan_to_become_member", "type": "checkbox", "label": "Does your partner plan to become a member?", "required": False, "is_fixed": False, "mapping": "partner_plan_to_become_member", "width": "full", "order": 4}
                    ]
                }
            ]
        },
        {
            "id": "step_newsletter",
            "title": "Newsletter & Volunteer",
            "description": "Tell us about your newsletter preferences and volunteer interests.",
            "order": 2,
            "sections": [
                {
                    "id": "section_referral",
                    "title": "Referral",
                    "order": 1,
                    "fields": [
                        {"id": "referred_by_member_name", "type": "text", "label": "If referred by a current member, please provide their name", "required": False, "is_fixed": False, "mapping": "referred_by_member_name", "width": "full", "order": 1, "placeholder": "Enter member name or email"}
                    ]
                },
                {
                    "id": "section_newsletter_prefs",
                    "title": "Newsletter Publication Preferences",
                    "description": "Select what you would like published in our newsletter.",
                    "order": 2,
                    "fields": [
                        {"id": "newsletter_publish_name", "type": "checkbox", "label": "Publish my name", "required": False, "is_fixed": False, "mapping": "newsletter_publish_name", "width": "full", "order": 1},
                        {"id": "newsletter_publish_photo", "type": "checkbox", "label": "Publish my photo", "required": False, "is_fixed": False, "mapping": "newsletter_publish_photo", "width": "full", "order": 2},
                        {"id": "newsletter_publish_birthday", "type": "checkbox", "label": "Publish my birthday", "required": False, "is_fixed": False, "mapping": "newsletter_publish_birthday", "width": "full", "order": 3},
                        {"id": "newsletter_publish_none", "type": "checkbox", "label": "Don't publish anything about me", "required": False, "is_fixed": False, "mapping": "newsletter_publish_none", "width": "full", "order": 4}
                    ],
                    "validation": {"atLeastOne": True, "message": "Please select at least one newsletter publication preference"}
                },
                {
                    "id": "section_volunteer",
                    "title": "Volunteer Interests",
                    "order": 3,
                    "fields": [
                        {"id": "volunteer_interests", "type": "multiselect", "label": "Select areas where you would like to volunteer", "required": False, "is_fixed": False, "mapping": "volunteer_interests", "width": "full", "order": 1, "options": [
                            {"value": "Events", "label": "Events"},
                            {"value": "Hospitality", "label": "Hospitality"},
                            {"value": "Newsletter", "label": "Newsletter"},
                            {"value": "Board", "label": "Board"},
                            {"value": "Community Outreach", "label": "Community Outreach"},
                            {"value": "Other", "label": "Other"}
                        ]}
                    ]
                },
                {
                    "id": "section_scholarship",
                    "title": "Scholarship Request",
                    "order": 4,
                    "fields": [
                        {"id": "scholarship_requested", "type": "checkbox", "label": "I would like to request a scholarship", "required": False, "is_fixed": False, "mapping": "scholarship_requested", "width": "full", "order": 1},
                        {"id": "scholarship_reason", "type": "textarea", "label": "Please explain why you are requesting a scholarship", "required": False, "is_fixed": False, "mapping": "scholarship_reason", "width": "full", "order": 2, "rows": 4}
                    ]
                }
            ]
        },
        {
            "id": "step_directory",
            "title": "Member Directory",
            "description": "Choose what information to display in the member directory.",
            "order": 3,
            "sections": [
                {
                    "id": "section_directory_settings",
                    "title": "Directory Settings",
                    "order": 1,
                    "fields": [
                        {"id": "show_in_directory", "type": "checkbox", "label": "Show my profile in the member directory", "required": False, "is_fixed": False, "mapping": "show_in_directory", "width": "full", "order": 1},
                        {"id": "directory_email", "type": "email", "label": "Directory Email (if different from account email)", "required": False, "is_fixed": False, "mapping": "directory_email", "width": "full", "order": 2},
                        {"id": "directory_bio", "type": "textarea", "label": "Bio for directory", "required": False, "is_fixed": False, "mapping": "directory_bio", "width": "full", "order": 3, "rows": 4},
                        {"id": "directory_address", "type": "text", "label": "Address to display in directory", "required": False, "is_fixed": False, "mapping": "directory_address", "width": "full", "order": 4},
                        {"id": "directory_phone", "type": "phone", "label": "Phone to display in directory", "required": False, "is_fixed": False, "mapping": "directory_phone", "width": "half", "order": 5},
                        {"id": "directory_dob", "type": "date", "label": "Birthday to display in directory", "required": False, "is_fixed": False, "mapping": "directory_dob", "width": "half", "order": 6},
                        {"id": "directory_partner_name", "type": "text", "label": "Partner name to display in directory", "required": False, "is_fixed": False, "mapping": "directory_partner_name", "width": "full", "order": 7}
                    ]
                }
            ]
        },
        {
            "id": "step_account",
            "title": "Account Setup",
            "description": "Create your account credentials and accept the terms of service.",
            "order": 4,
            "sections": [
                {
                    "id": "section_credentials",
                    "title": "Account Credentials",
                    "order": 1,
                    "fields": [
                        {"id": "email", "type": "email", "label": "Email Address", "required": True, "is_fixed": True, "mapping": "email", "width": "full", "order": 1},
                        {"id": "password", "type": "password", "label": "Password", "required": True, "is_fixed": True, "mapping": "password", "validation": {"minLength": 6}, "width": "half", "order": 2},
                        {"id": "confirmPassword", "type": "password", "label": "Confirm Password", "required": True, "is_fixed": True, "client_only": True, "width": "half", "order": 3, "validation": {"matchField": "password"}}
                    ]
                },
                {
                    "id": "section_tos",
                    "title": "Terms of Service",
                    "order": 2,
                    "fields": [
                        {"id": "accepts_tos", "type": "checkbox", "label": "I accept the Terms of Service and Privacy Policy", "required": True, "is_fixed": True, "mapping": "accepts_tos", "width": "full", "order": 1}
                    ]
                }
            ]
        }
    ],
    "conditional_rules": [
        {
            "id": "rule_scholarship_reason",
            "trigger_field": "scholarship_requested",
            "trigger_operator": "equals",
            "trigger_value": True,
            "action": "show",
            "target_fields": ["scholarship_reason"]
        }
    ],
    "fixed_fields": ["email", "password", "first_name", "last_name", "accepts_tos"]
}

# Supported field types with their validation options
FIELD_TYPES = {
    "text": {
        "name": "Text Input",
        "validation_options": ["required", "minLength", "maxLength", "pattern"],
        "properties": ["placeholder", "width"]
    },
    "email": {
        "name": "Email Input",
        "validation_options": ["required"],
        "properties": ["placeholder"]
    },
    "phone": {
        "name": "Phone Input",
        "validation_options": ["required"],
        "properties": ["placeholder"]
    },
    "date": {
        "name": "Date Input",
        "validation_options": ["required", "min_date", "max_date"],
        "properties": []
    },
    "dropdown": {
        "name": "Dropdown Select",
        "validation_options": ["required"],
        "properties": ["options", "placeholder"]
    },
    "checkbox": {
        "name": "Checkbox",
        "validation_options": ["required"],
        "properties": []
    },
    "radio": {
        "name": "Radio Group",
        "validation_options": ["required"],
        "properties": ["options"]
    },
    "multiselect": {
        "name": "Multi-Select",
        "validation_options": ["required", "min_selections", "max_selections"],
        "properties": ["options"]
    },
    "address_group": {
        "name": "Address Group",
        "validation_options": ["required"],
        "properties": []
    },
    "textarea": {
        "name": "Text Area",
        "validation_options": ["required", "minLength", "maxLength"],
        "properties": ["rows", "placeholder"]
    },
    "file_upload": {
        "name": "File Upload",
        "validation_options": ["required", "file_types", "max_size"],
        "properties": ["allowed_types", "max_size_mb"]
    },
    "password": {
        "name": "Password Input",
        "validation_options": ["required", "minLength"],
        "properties": []
    }
}


class RegistrationSchemaRequest(BaseModel):
    """Request model for updating registration schema"""
    schema_data: dict


def get_registration_schema(db: Session) -> dict:
    """Get the current registration form schema from database or return default"""
    setting = db.query(SystemSettings).filter(
        SystemSettings.setting_key == "registration.form_schema"
    ).first()

    if setting and setting.setting_value:
        import json
        try:
            return json.loads(setting.setting_value)
        except json.JSONDecodeError:
            logger.error("Failed to parse registration schema from database")
            return DEFAULT_REGISTRATION_SCHEMA.copy()

    return DEFAULT_REGISTRATION_SCHEMA.copy()


def save_registration_schema(db: Session, schema: dict, user_id: Optional[uuid.UUID] = None) -> None:
    """Save registration schema to database"""
    import json

    setting = db.query(SystemSettings).filter(
        SystemSettings.setting_key == "registration.form_schema"
    ).first()

    schema_json = json.dumps(schema)

    if setting:
        setting.setting_value = schema_json
        setting.updated_by = user_id
        setting.updated_at = datetime.now(timezone.utc)
    else:
        from models import SettingType
        setting = SystemSettings(
            setting_key="registration.form_schema",
            setting_value=schema_json,
            setting_type=SettingType.json,
            description="Dynamic registration form schema defining steps, fields, and validation rules",
            updated_by=user_id,
            is_sensitive=False
        )
        db.add(setting)

    db.commit()


def validate_schema(schema: dict) -> tuple[bool, list[str]]:
    """Validate registration schema structure"""
    errors = []

    # Check version
    if "version" not in schema:
        errors.append("Schema must have a version field")

    # Check steps
    if "steps" not in schema or not isinstance(schema.get("steps"), list):
        errors.append("Schema must have a steps array")
        return False, errors

    if len(schema["steps"]) == 0:
        errors.append("Schema must have at least one step")

    if len(schema["steps"]) > 10:
        errors.append("Schema cannot have more than 10 steps")

    # Check fixed fields exist
    fixed_fields = schema.get("fixed_fields", ["email", "password", "first_name", "last_name", "accepts_tos"])
    all_field_ids = set()

    for step in schema.get("steps", []):
        if "id" not in step:
            errors.append("Step missing id")
            continue

        if "sections" not in step or not isinstance(step.get("sections"), list):
            errors.append(f"Step {step.get('id')} must have sections array")
            continue

        for section in step.get("sections", []):
            if "fields" not in section or not isinstance(section.get("fields"), list):
                errors.append(f"Section {section.get('id')} must have fields array")
                continue

            for field in section.get("fields", []):
                if "id" not in field:
                    errors.append(f"Field missing id in section {section.get('id')}")
                    continue

                all_field_ids.add(field["id"])

                if "type" not in field:
                    errors.append(f"Field {field['id']} missing type")

                if field.get("type") not in FIELD_TYPES:
                    errors.append(f"Field {field['id']} has invalid type: {field.get('type')}")

    # Verify fixed fields are present
    for fixed_field in fixed_fields:
        if fixed_field not in all_field_ids:
            errors.append(f"Fixed field '{fixed_field}' must be present in schema")

    # Field limit check
    if len(all_field_ids) > 100:
        errors.append("Schema cannot have more than 100 fields")

    return len(errors) == 0, errors


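# Minimal sketch of how validate_schema is used (same call shape as the admin
# endpoints further below):
#
#   is_valid, errors = validate_schema(DEFAULT_REGISTRATION_SCHEMA)
#   # -> (True, [])
#
# A schema that has a version but no "steps" list fails fast with
# (False, ["Schema must have a steps array"]).

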
def evaluate_conditional_rules(form_data: dict, rules: list) -> set:
    """Evaluate conditional rules and return set of visible field IDs"""
    visible_fields = set()

    # Fields targeted by "hide" rules start out visible; they are removed only
    # when their rule's condition is met below
    for rule in rules:
        target_fields = rule.get("target_fields", [])
        if rule.get("action") == "hide":
            visible_fields.update(target_fields)

    # Apply rules
    for rule in rules:
        trigger_field = rule.get("trigger_field")
        trigger_value = rule.get("trigger_value")
        trigger_operator = rule.get("trigger_operator", "equals")
        action = rule.get("action", "show")
        target_fields = rule.get("target_fields", [])

        field_value = form_data.get(trigger_field)

        # Evaluate condition
        condition_met = False
        if trigger_operator == "equals":
            condition_met = field_value == trigger_value
        elif trigger_operator == "not_equals":
            condition_met = field_value != trigger_value
        elif trigger_operator == "contains":
            condition_met = trigger_value in (field_value or []) if isinstance(field_value, list) else trigger_value in str(field_value or "")
        elif trigger_operator == "not_empty":
            condition_met = bool(field_value)
        elif trigger_operator == "empty":
            condition_met = not bool(field_value)

        # Apply action
        if condition_met:
            if action == "show":
                visible_fields.update(target_fields)
            elif action == "hide":
                visible_fields -= set(target_fields)

    return visible_fields


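# Worked example against the default schema's only rule (rule_scholarship_reason):
#
#   evaluate_conditional_rules({"scholarship_requested": True},
#                              DEFAULT_REGISTRATION_SCHEMA["conditional_rules"])
#   # -> {"scholarship_reason"}
#
#   evaluate_conditional_rules({"scholarship_requested": False},
#                              DEFAULT_REGISTRATION_SCHEMA["conditional_rules"])
#   # -> set()

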
def validate_field_by_type(field: dict, value) -> list[str]:
    """Validate a field value based on its type and validation rules"""
    errors = []
    field_type = field.get("type")
    validation = field.get("validation", {})
    label = field.get("label", field.get("id"))

    if field_type == "text" or field_type == "textarea":
        if not isinstance(value, str):
            errors.append(f"{label} must be text")
            return errors
        if "minLength" in validation and len(value) < validation["minLength"]:
            errors.append(f"{label} must be at least {validation['minLength']} characters")
        if "maxLength" in validation and len(value) > validation["maxLength"]:
            errors.append(f"{label} must be at most {validation['maxLength']} characters")
        if "pattern" in validation:
            import re
            if not re.match(validation["pattern"], value):
                errors.append(f"{label} format is invalid")

    elif field_type == "email":
        import re
        email_pattern = r'^[^\s@]+@[^\s@]+\.[^\s@]+$'
        if not re.match(email_pattern, str(value)):
            errors.append(f"{label} must be a valid email address")

    elif field_type == "phone":
        if not isinstance(value, str) or len(value) < 7:
            errors.append(f"{label} must be a valid phone number")

    elif field_type == "date":
        # Date validation happens during parsing
        pass

    elif field_type == "dropdown" or field_type == "radio":
        options = [opt.get("value") for opt in field.get("options", [])]
        if value not in options:
            errors.append(f"{label} must be one of the available options")

    elif field_type == "multiselect":
        if not isinstance(value, list):
            errors.append(f"{label} must be a list of selections")
        else:
            options = [opt.get("value") for opt in field.get("options", [])]
            for v in value:
                if v not in options:
                    errors.append(f"{label} contains invalid option: {v}")
            if "min_selections" in validation and len(value) < validation["min_selections"]:
                errors.append(f"{label} requires at least {validation['min_selections']} selections")
            if "max_selections" in validation and len(value) > validation["max_selections"]:
                errors.append(f"{label} allows at most {validation['max_selections']} selections")

    elif field_type == "checkbox":
        if not isinstance(value, bool):
            errors.append(f"{label} must be true or false")

    elif field_type == "password":
        if not isinstance(value, str):
            errors.append(f"{label} must be text")
        elif "minLength" in validation and len(value) < validation["minLength"]:
            errors.append(f"{label} must be at least {validation['minLength']} characters")

    return errors


def validate_dynamic_registration(data: dict, schema: dict) -> tuple[bool, list[str]]:
    """Validate registration data against dynamic schema"""
    errors = []
    conditional_rules = schema.get("conditional_rules", [])

    # Get all fields and their visibility based on conditional rules
    hidden_fields = set()
    for rule in conditional_rules:
        if rule.get("action") == "show":
            # Fields are hidden by default if they have a "show" rule
            hidden_fields.update(rule.get("target_fields", []))

    # Evaluate which hidden fields should now be visible
    visible_conditional_fields = evaluate_conditional_rules(data, conditional_rules)
    hidden_fields -= visible_conditional_fields

    for step in schema.get("steps", []):
        for section in step.get("sections", []):
            # Check section-level validation
            section_validation = section.get("validation", {})
            if section_validation.get("atLeastOne"):
                field_ids = [f["id"] for f in section.get("fields", [])]
                has_value = any(data.get(fid) for fid in field_ids)
                if not has_value:
                    errors.append(section_validation.get("message", f"At least one field in {section.get('title', 'this section')} is required"))

            for field in section.get("fields", []):
                field_id = field.get("id")

                # Skip hidden fields
                if field_id in hidden_fields:
                    continue

                # Skip client-only fields (like confirmPassword)
                if field.get("client_only"):
                    continue

                value = data.get(field_id)

                # Required check
                if field.get("required"):
                    if value is None or value == "" or (isinstance(value, list) and len(value) == 0):
                        errors.append(f"{field.get('label', field_id)} is required")
                        continue

                # Type-specific validation
                if value is not None and value != "":
                    field_errors = validate_field_by_type(field, value)
                    errors.extend(field_errors)

    return len(errors) == 0, errors


def split_registration_data(data: dict, schema: dict) -> tuple[dict, dict]:
    """Split registration data into User model fields and custom fields"""
    user_data = {}
    custom_data = {}

    # Get field mappings from schema
    field_mappings = {}
    for step in schema.get("steps", []):
        for section in step.get("sections", []):
            for field in section.get("fields", []):
                if field.get("mapping"):
                    field_mappings[field["id"]] = field["mapping"]

    # User model fields that have direct column mappings
    user_model_fields = {
        "email", "password", "first_name", "last_name", "phone", "address",
        "city", "state", "zipcode", "date_of_birth", "lead_sources",
        "partner_first_name", "partner_last_name", "partner_is_member",
        "partner_plan_to_become_member", "referred_by_member_name",
        "newsletter_publish_name", "newsletter_publish_photo",
        "newsletter_publish_birthday", "newsletter_publish_none",
        "volunteer_interests", "scholarship_requested", "scholarship_reason",
        "show_in_directory", "directory_email", "directory_bio",
        "directory_address", "directory_phone", "directory_dob",
        "directory_partner_name", "accepts_tos"
    }

    for field_id, value in data.items():
        mapping = field_mappings.get(field_id, field_id)

        # Skip client-only fields
        if field_id == "confirmPassword":
            continue

        if mapping in user_model_fields:
            user_data[mapping] = value
        else:
            custom_data[field_id] = value

    return user_data, custom_data


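# Illustrative split under the default schema: a submission containing
# {"first_name": "Ada", "accepts_tos": True, "tshirt_size": "M"} comes back as
# user_data={"first_name": "Ada", "accepts_tos": True} (direct User columns) and
# custom_data={"tshirt_size": "M"} (no mapping onto the User model). Here
# "tshirt_size" is a hypothetical admin-added custom field, not part of the
# default schema.

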
# Public endpoint - returns schema for registration form
@api_router.get("/registration/schema")
async def get_public_registration_schema(db: Session = Depends(get_db)):
    """Get registration form schema for public registration page"""
    schema = get_registration_schema(db)
    # Return a clean version without internal metadata
    return {
        "version": schema.get("version"),
        "steps": schema.get("steps", []),
        "conditional_rules": schema.get("conditional_rules", []),
        "fixed_fields": schema.get("fixed_fields", [])
    }


# Admin endpoint - returns schema with metadata
@api_router.get("/admin/registration/schema")
async def get_admin_registration_schema(
    current_user: User = Depends(require_permission("registration.view")),
    db: Session = Depends(get_db)
):
    """Get registration form schema with admin metadata"""
    schema = get_registration_schema(db)

    # Get version info
    setting = db.query(SystemSettings).filter(
        SystemSettings.setting_key == "registration.form_schema"
    ).first()

    return {
        "schema": schema,
        "metadata": {
            "last_updated": setting.updated_at.isoformat() if setting and setting.updated_at else None,
            "updated_by": str(setting.updated_by) if setting and setting.updated_by else None,
            "is_default": setting is None
        }
    }


# Admin endpoint - update schema
@api_router.put("/admin/registration/schema")
async def update_registration_schema(
    request: RegistrationSchemaRequest,
    current_user: User = Depends(require_permission("registration.manage")),
    db: Session = Depends(get_db)
):
    """Update registration form schema"""
    schema = request.schema_data

    # Validate schema structure
    is_valid, errors = validate_schema(schema)
    if not is_valid:
        raise HTTPException(
            status_code=400,
            detail={"message": "Invalid schema", "errors": errors}
        )

    # Save schema
    save_registration_schema(db, schema, current_user.id)

    logger.info(f"Registration schema updated by user {current_user.email}")

    return {"message": "Registration schema updated successfully"}


# Admin endpoint - validate schema without saving
@api_router.post("/admin/registration/schema/validate")
async def validate_registration_schema_endpoint(
    request: RegistrationSchemaRequest,
    current_user: User = Depends(require_permission("registration.manage")),
    db: Session = Depends(get_db)
):
    """Validate registration form schema without saving"""
    schema = request.schema_data
    is_valid, errors = validate_schema(schema)

    return {
        "valid": is_valid,
        "errors": errors
    }


# Admin endpoint - reset schema to default
@api_router.post("/admin/registration/schema/reset")
async def reset_registration_schema(
    current_user: User = Depends(require_permission("registration.manage")),
    db: Session = Depends(get_db)
):
    """Reset registration form schema to default"""
    save_registration_schema(db, DEFAULT_REGISTRATION_SCHEMA.copy(), current_user.id)

    logger.info(f"Registration schema reset to default by user {current_user.email}")

    return {"message": "Registration schema reset to default"}


# Admin endpoint - get available field types
@api_router.get("/admin/registration/field-types")
async def get_field_types(
    current_user: User = Depends(require_permission("registration.view")),
    db: Session = Depends(get_db)
):
    """Get available field types for registration form builder"""
    return FIELD_TYPES


# ============================================================================
# Admin Routes
# ============================================================================
@api_router.get("/admin/storage/usage")
async def get_storage_usage(
    current_user: User = Depends(require_permission("settings.storage")),
    db: Session = Depends(get_db)
):
    """Get current storage usage statistics"""
    from models import StorageUsage

    storage = db.query(StorageUsage).first()

    if not storage:
        # Initialize if doesn't exist
        storage = StorageUsage(
            total_bytes_used=0,
            max_bytes_allowed=int(os.getenv('MAX_STORAGE_BYTES', 1073741824))
        )
        db.add(storage)
        db.commit()
        db.refresh(storage)

    percentage = (storage.total_bytes_used / storage.max_bytes_allowed) * 100 if storage.max_bytes_allowed > 0 else 0

    return {
        "total_bytes_used": storage.total_bytes_used,
        "max_bytes_allowed": storage.max_bytes_allowed,
        "percentage": round(percentage, 2),
        "available_bytes": storage.max_bytes_allowed - storage.total_bytes_used
    }

@api_router.get("/admin/storage/breakdown")
async def get_storage_breakdown(
    current_user: User = Depends(require_permission("settings.storage")),
    db: Session = Depends(get_db)
):
    """Get storage usage breakdown by category"""
    from sqlalchemy import func
    from models import User, EventGallery, NewsletterArchive, FinancialReport, BylawsDocument

    # Count storage by category
    # Note: profile_photos removed - User.profile_photo_size field doesn't exist
    # If needed in future, add profile_photo_size column to User model
    gallery_images = db.query(func.coalesce(func.sum(EventGallery.file_size_bytes), 0)).scalar() or 0
    newsletters = db.query(func.coalesce(func.sum(NewsletterArchive.file_size_bytes), 0)).filter(
        NewsletterArchive.document_type == 'upload'
    ).scalar() or 0
    financials = db.query(func.coalesce(func.sum(FinancialReport.file_size_bytes), 0)).filter(
        FinancialReport.document_type == 'upload'
    ).scalar() or 0
    bylaws = db.query(func.coalesce(func.sum(BylawsDocument.file_size_bytes), 0)).filter(
        BylawsDocument.document_type == 'upload'
    ).scalar() or 0

    return {
        "breakdown": {
            "gallery_images": gallery_images,
            "newsletters": newsletters,
            "financials": financials,
            "bylaws": bylaws
        },
        "total": gallery_images + newsletters + financials + bylaws
    }


@api_router.get("/admin/users")
async def get_all_users(
    status: Optional[str] = None,
    db: Session = Depends(get_db),
    current_user: User = Depends(require_permission("users.view"))
):
    query = db.query(User)

    if status:
        try:
            status_enum = UserStatus(status)
            query = query.filter(User.status == status_enum)
        except ValueError:
            raise HTTPException(status_code=400, detail="Invalid status")

    users = query.order_by(User.created_at.desc()).all()

    return [
        {
            "id": str(user.id),
            "email": user.email,
            "first_name": user.first_name,
            "last_name": user.last_name,
            "phone": user.phone,
            "status": user.status.value,
            "role": get_user_role_code(user),
            "email_verified": user.email_verified,
            "created_at": user.created_at.isoformat(),
            "lead_sources": user.lead_sources,
            "referred_by_member_name": user.referred_by_member_name
        }
        for user in users
    ]

# IMPORTANT: All specific routes (/create, /invite, /invitations, /export, /import)
# must be defined ABOVE this {user_id} route to avoid path conflicts

@api_router.get("/admin/users/invitations")
async def get_invitations(
    status: Optional[str] = None,
    current_user: User = Depends(require_permission("users.view")),
    db: Session = Depends(get_db)
):
    """
    List all invitations with optional status filter
    Admin/Superadmin only
    """
    query = db.query(UserInvitation)

    if status:
        try:
            status_enum = InvitationStatus[status]
            query = query.filter(UserInvitation.status == status_enum)
        except KeyError:
            raise HTTPException(status_code=400, detail=f"Invalid status: {status}")

    invitations = query.order_by(UserInvitation.invited_at.desc()).all()

    return [
        {
            "id": str(inv.id),
            "email": inv.email,
            "role": inv.role.value,
            "status": inv.status.value,
            "first_name": inv.first_name,
            "last_name": inv.last_name,
            "phone": inv.phone,
            "invited_by": str(inv.invited_by),
            "invited_at": inv.invited_at.isoformat(),
            "expires_at": inv.expires_at.isoformat(),
            "accepted_at": inv.accepted_at.isoformat() if inv.accepted_at else None
        }
        for inv in invitations
    ]

@api_router.get("/admin/users/export")
async def export_users_csv(
    status: Optional[str] = None,
    role: Optional[str] = None,
    email_verified: Optional[bool] = None,
    search: Optional[str] = None,
    current_user: User = Depends(require_permission("users.export")),
    db: Session = Depends(get_db)
):
    """
    Export users to CSV with optional filters
    Admin/Superadmin only
    Requires permission: users.export
    """
    # Build query
    query = db.query(User)

    # Apply filters
    if status:
        try:
            status_enum = UserStatus[status]
            query = query.filter(User.status == status_enum)
        except KeyError:
            raise HTTPException(status_code=400, detail=f"Invalid status: {status}")

    if role:
        try:
            role_enum = UserRole[role]
            query = query.filter(User.role == role_enum)
        except KeyError:
            raise HTTPException(status_code=400, detail=f"Invalid role: {role}")

    if email_verified is not None:
        query = query.filter(User.email_verified == email_verified)

    if search:
        search_filter = or_(
            User.email.ilike(f"%{search}%"),
            User.first_name.ilike(f"%{search}%"),
            User.last_name.ilike(f"%{search}%")
        )
        query = query.filter(search_filter)

    # Get all matching users
    users = query.order_by(User.created_at.desc()).all()

    # Create CSV in memory
    output = io.StringIO()
    writer = csv.writer(output)

    # Write header
    writer.writerow([
        'ID',
        'Email',
        'First Name',
        'Last Name',
        'Phone',
        'Role',
        'Status',
        'Email Verified',
        'Address',
        'City',
        'State',
        'Zipcode',
        'Date of Birth',
        'Member Since',
        'Partner First Name',
        'Partner Last Name',
        'Partner Is Member',
        'Partner Plan to Become Member',
        'Referred By Member Name',
        'Lead Sources',
        'Created At',
        'Updated At'
    ])

    # Write data rows
    for user in users:
        writer.writerow([
            str(user.id),
            user.email,
            user.first_name,
            user.last_name,
            user.phone,
            get_user_role_code(user),
            user.status.value,
            'Yes' if user.email_verified else 'No',
            user.address or '',
            user.city or '',
            user.state or '',
            user.zipcode or '',
            user.date_of_birth.strftime('%Y-%m-%d') if user.date_of_birth else '',
            user.member_since.strftime('%Y-%m-%d') if user.member_since else '',
            user.partner_first_name or '',
            user.partner_last_name or '',
            'Yes' if user.partner_is_member else 'No',
            'Yes' if user.partner_plan_to_become_member else 'No',
            user.referred_by_member_name or '',
            ','.join(user.lead_sources) if user.lead_sources else '',
            user.created_at.strftime('%Y-%m-%d %H:%M:%S'),
            user.updated_at.strftime('%Y-%m-%d %H:%M:%S') if user.updated_at else ''
        ])

    # Prepare response
    output.seek(0)

    # Generate filename with timestamp
    timestamp = datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')
    filename = f"members_export_{timestamp}.csv"

    logger.info(f"Admin {current_user.email} exported {len(users)} users to CSV")

    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={
            "Content-Disposition": f"attachment; filename={filename}"
        }
    )

@api_router.get("/admin/users/{user_id}")
async def get_user_by_id(
    user_id: str,
    db: Session = Depends(get_db),
    current_user: User = Depends(require_permission("users.view"))
):
    """Get specific user by ID (admin only)"""
    user = db.query(User).filter(User.id == user_id).first()
    if not user:
        raise HTTPException(status_code=404, detail="User not found")

    return {
        "id": str(user.id),
        "email": user.email,
        "first_name": user.first_name,
        "last_name": user.last_name,
        "phone": user.phone,
        "address": user.address,
        "city": user.city,
        "state": user.state,
        "zipcode": user.zipcode,
        "date_of_birth": user.date_of_birth.isoformat() if user.date_of_birth else None,
        "profile_photo_url": user.profile_photo_url,
        "partner_first_name": user.partner_first_name,
        "partner_last_name": user.partner_last_name,
        "partner_is_member": user.partner_is_member,
        "partner_plan_to_become_member": user.partner_plan_to_become_member,
        "referred_by_member_name": user.referred_by_member_name,
        "status": user.status.value,
        "role": get_user_role_code(user),
        "email_verified": user.email_verified,
        "newsletter_subscribed": user.newsletter_subscribed,
        "lead_sources": user.lead_sources,
        "member_since": user.member_since.isoformat() if user.member_since else None,
        "created_at": user.created_at.isoformat() if user.created_at else None,
        "updated_at": user.updated_at.isoformat() if user.updated_at else None
    }

@api_router.get("/admin/users/{user_id}/transactions")
async def get_user_transactions(
    user_id: str,
    db: Session = Depends(get_db),
    current_user: User = Depends(require_permission("users.view"))
):
    """
    Get a specific user's transaction history (admin only).
    Returns subscriptions and donations for the specified user.
    """
    # Verify user exists
    user = db.query(User).filter(User.id == user_id).first()
    if not user:
        raise HTTPException(status_code=404, detail="User not found")

    # Get user's subscriptions with plan details
    subscriptions = db.query(Subscription).filter(
        Subscription.user_id == user_id
    ).order_by(Subscription.created_at.desc()).all()

    subscription_list = []
    for sub in subscriptions:
        plan = db.query(SubscriptionPlan).filter(SubscriptionPlan.id == sub.plan_id).first()
        subscription_list.append({
            "id": str(sub.id),
            "type": "subscription",
            "description": plan.name if plan else "Subscription",
            "amount_cents": sub.amount_paid_cents or ((sub.base_subscription_cents or 0) + (sub.donation_cents or 0)),
            "base_amount_cents": sub.base_subscription_cents,
            "donation_cents": sub.donation_cents,
            "status": sub.status.value if sub.status else "unknown",
            "payment_method": sub.payment_method,
            "card_brand": sub.card_brand,
            "card_last4": sub.card_last4,
            "stripe_receipt_url": sub.stripe_receipt_url,
            "created_at": sub.created_at.isoformat() if sub.created_at else None,
            "payment_completed_at": sub.payment_completed_at.isoformat() if sub.payment_completed_at else None,
            "start_date": sub.start_date.isoformat() if sub.start_date else None,
            "end_date": sub.end_date.isoformat() if sub.end_date else None,
            "billing_cycle": plan.billing_cycle if plan else None,
            "manual_payment": sub.manual_payment,
            "manual_payment_notes": sub.manual_payment_notes
        })

    # Get user's donations
    donations = db.query(Donation).filter(
        Donation.user_id == user_id
    ).order_by(Donation.created_at.desc()).all()

    donation_list = []
    for don in donations:
        donation_list.append({
            "id": str(don.id),
            "type": "donation",
            "description": "Donation",
            "amount_cents": don.amount_cents,
            "status": don.status.value if don.status else "unknown",
            "payment_method": don.payment_method,
            "card_brand": don.card_brand,
            "card_last4": don.card_last4,
            "stripe_receipt_url": don.stripe_receipt_url,
            "created_at": don.created_at.isoformat() if don.created_at else None,
            "payment_completed_at": don.payment_completed_at.isoformat() if don.payment_completed_at else None,
            "notes": don.notes
        })

    return {
        "user_id": str(user.id),
        "user_name": f"{user.first_name} {user.last_name}",
        "subscriptions": subscription_list,
        "donations": donation_list,
        "total_subscription_amount_cents": sum(s["amount_cents"] or 0 for s in subscription_list),
        "total_donation_amount_cents": sum(d["amount_cents"] or 0 for d in donation_list)
    }

@api_router.put("/admin/users/{user_id}")
async def update_user_profile(
    user_id: str,
    request: AdminUpdateUserRequest,
    db: Session = Depends(get_db),
    current_user: User = Depends(require_permission("users.edit"))
):
    """Update user profile fields (admin only)"""
    user = db.query(User).filter(User.id == user_id).first()
    if not user:
        raise HTTPException(status_code=404, detail="User not found")

    # Update basic personal information
    if request.first_name is not None:
        user.first_name = request.first_name
    if request.last_name is not None:
        user.last_name = request.last_name
    if request.phone is not None:
        user.phone = request.phone
    if request.address is not None:
        user.address = request.address
    if request.city is not None:
        user.city = request.city
    if request.state is not None:
        user.state = request.state
    if request.zipcode is not None:
        user.zipcode = request.zipcode
    if request.date_of_birth is not None:
        user.date_of_birth = request.date_of_birth

    # Update member_since (admin only)
    if request.member_since is not None:
        user.member_since = request.member_since

    # Update partner information
    if request.partner_first_name is not None:
        user.partner_first_name = request.partner_first_name
    if request.partner_last_name is not None:
        user.partner_last_name = request.partner_last_name
    if request.partner_is_member is not None:
        user.partner_is_member = request.partner_is_member
    if request.partner_plan_to_become_member is not None:
        user.partner_plan_to_become_member = request.partner_plan_to_become_member
    if request.referred_by_member_name is not None:
        user.referred_by_member_name = request.referred_by_member_name

    user.updated_at = datetime.now(timezone.utc)
    db.commit()
    db.refresh(user)

    logger.info(f"Admin {current_user.email} updated profile for user {user.email}")

    return {
        "message": "User profile updated successfully",
        "user_id": str(user.id)
    }

@api_router.put("/admin/users/{user_id}/validate")
|
|
async def validate_user(
|
|
user_id: str,
|
|
bypass_email_verification: bool = False,
|
|
db: Session = Depends(get_db),
|
|
current_user: User = Depends(require_permission("users.approve"))
|
|
):
|
|
user = db.query(User).filter(User.id == user_id).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
# Handle bypass email verification for pending_email users
|
|
if bypass_email_verification and user.status == UserStatus.pending_email:
|
|
# Verify email manually
|
|
user.email_verified = True
|
|
user.email_verification_token = None
|
|
|
|
# Determine status based on referral
|
|
if user.referred_by_member_name:
|
|
referrer = db.query(User).filter(
|
|
or_(
|
|
User.first_name + ' ' + User.last_name == user.referred_by_member_name,
|
|
User.email == user.referred_by_member_name
|
|
),
|
|
User.status == UserStatus.active
|
|
).first()
|
|
user.status = UserStatus.pre_validated if referrer else UserStatus.pending_validation
|
|
else:
|
|
user.status = UserStatus.pending_validation
|
|
|
|
logger.info(f"Admin {current_user.email} bypassed email verification for {user.email}")
|
|
|
|
# Validate user status - must be pending_validation or pre_validated
|
|
if user.status not in [UserStatus.pending_validation, UserStatus.pre_validated]:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"User must have verified email first. Current: {user.status.value}"
|
|
)
|
|
|
|
# Set to payment_pending - user becomes active after payment via webhook
|
|
user.status = UserStatus.payment_pending
|
|
user.updated_at = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
db.refresh(user)
|
|
|
|
# Send payment prompt email
|
|
await send_payment_prompt_email(user.email, user.first_name)
|
|
|
|
logger.info(f"User validated (payment pending): {user.email} by admin: {current_user.email}")
|
|
|
|
return {"message": "User validated - payment email sent"}
|
|
|
|
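# Illustrative note (assumption based on the handler above and the webhook mentioned
# in its comments): the admin-driven membership flow is roughly
#   pending_email -> pending_validation / pre_validated (email verified, optionally referred)
#   -> payment_pending (this validate endpoint)
#   -> active (set by the payment webhook, or by the manual activate-payment endpoint below).
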
@api_router.put("/admin/users/{user_id}/status")
|
|
async def update_user_status(
|
|
user_id: str,
|
|
request: UpdateUserStatusRequest,
|
|
db: Session = Depends(get_db),
|
|
current_user: User = Depends(require_permission("users.status"))
|
|
):
|
|
user = db.query(User).filter(User.id == user_id).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
try:
|
|
new_status = UserStatus(request.status)
|
|
user.status = new_status
|
|
user.updated_at = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
db.refresh(user)
|
|
|
|
return {"message": "User status updated successfully"}
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid status")
|
|
|
|
@api_router.post("/admin/users/{user_id}/reject")
|
|
async def reject_user(
|
|
user_id: str,
|
|
rejection_data: dict,
|
|
current_user: User = Depends(require_permission("users.approve")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Reject a user's membership application with mandatory reason"""
|
|
user = db.query(User).filter(User.id == user_id).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
reason = rejection_data.get("reason", "").strip()
|
|
if not reason:
|
|
raise HTTPException(status_code=400, detail="Rejection reason is required")
|
|
|
|
# Update user status to rejected
|
|
user.status = UserStatus.rejected
|
|
user.rejection_reason = reason
|
|
user.rejected_at = datetime.now(timezone.utc)
|
|
user.rejected_by = current_user.id
|
|
user.updated_at = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
|
|
# Send rejection email
|
|
try:
|
|
from email_service import send_rejection_email
|
|
await send_rejection_email(
|
|
user.email,
|
|
user.first_name,
|
|
reason
|
|
)
|
|
logger.info(f"Rejection email sent to {user.email}")
|
|
except Exception as e:
|
|
logger.error(f"Failed to send rejection email to {user.email}: {str(e)}")
|
|
# Don't fail the request if email fails
|
|
|
|
logger.info(f"Admin {current_user.email} rejected user {user.email}")
|
|
|
|
return {
|
|
"message": "User rejected successfully",
|
|
"user_id": str(user.id),
|
|
"status": user.status.value
|
|
}
|
|
|
|
@api_router.post("/admin/users/{user_id}/activate-payment")
|
|
async def activate_payment_manually(
|
|
user_id: str,
|
|
request: ManualPaymentRequest,
|
|
db: Session = Depends(get_db),
|
|
current_user: User = Depends(require_permission("subscriptions.activate"))
|
|
):
|
|
"""Manually activate user who paid offline (cash, bank transfer, etc.)"""
|
|
|
|
# 1. Find user
|
|
user = db.query(User).filter(User.id == user_id).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
# 2. Validate status
|
|
if user.status != UserStatus.payment_pending:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"User must be in payment_pending status. Current: {user.status.value}"
|
|
)
|
|
|
|
# 3. Get subscription plan
|
|
plan = db.query(SubscriptionPlan).filter(SubscriptionPlan.id == request.plan_id).first()
|
|
if not plan:
|
|
raise HTTPException(status_code=404, detail="Subscription plan not found")
|
|
|
|
# 4. Validate amount against plan minimum
|
|
if request.amount_cents < plan.minimum_price_cents:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"Amount must be at least ${plan.minimum_price_cents / 100:.2f}"
|
|
)
|
|
|
|
# 5. Calculate donation split
|
|
base_amount = plan.minimum_price_cents
|
|
donation_amount = request.amount_cents - base_amount
|
|
|
|
# 6. Calculate subscription period
|
|
from payment_service import calculate_subscription_period
|
|
|
|
if request.use_custom_period or request.override_plan_dates:
|
|
# Admin-specified custom dates override everything
|
|
if not request.custom_period_start or not request.custom_period_end:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail="Custom period start and end dates are required when use_custom_period or override_plan_dates is true"
|
|
)
|
|
period_start = request.custom_period_start
|
|
period_end = request.custom_period_end
|
|
else:
|
|
# Use plan's custom cycle or billing cycle
|
|
period_start, period_end = calculate_subscription_period(plan)
|
|
|
|
# 7. Create subscription record (manual payment) with donation tracking
|
|
subscription = Subscription(
|
|
user_id=user.id,
|
|
plan_id=plan.id,
|
|
stripe_subscription_id=None, # No Stripe involvement
|
|
stripe_customer_id=None,
|
|
status=SubscriptionStatus.active,
|
|
start_date=period_start,
|
|
end_date=period_end,
|
|
amount_paid_cents=request.amount_cents,
|
|
base_subscription_cents=base_amount,
|
|
donation_cents=donation_amount,
|
|
payment_method=request.payment_method,
|
|
manual_payment=True,
|
|
manual_payment_notes=request.notes,
|
|
manual_payment_admin_id=current_user.id,
|
|
manual_payment_date=request.payment_date
|
|
)
|
|
db.add(subscription)
|
|
|
|
# 6. Activate user
|
|
user.status = UserStatus.active
|
|
set_user_role(user, UserRole.member, db)
|
|
# Set member_since only if not already set (first time activation)
|
|
if not user.member_since:
|
|
user.member_since = datetime.now(timezone.utc)
|
|
user.updated_at = datetime.now(timezone.utc)
|
|
|
|
# 7. Commit
|
|
db.commit()
|
|
db.refresh(subscription)
|
|
|
|
# 8. Log admin action
|
|
logger.info(
|
|
f"Admin {current_user.email} manually activated payment for user {user.email} "
|
|
f"via {request.payment_method} for ${request.amount_cents/100:.2f} "
|
|
f"with plan {plan.name} ({period_start.date()} to {period_end.date()})"
|
|
)
|
|
|
|
return {
|
|
"message": "User payment activated successfully",
|
|
"user_id": str(user.id),
|
|
"subscription_id": str(subscription.id)
|
|
}
|
|
|
|
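# Minimal illustrative sketch (not used by the endpoint above; the helper name is
# hypothetical): the manual-payment donation split is simply "everything above the
# plan minimum counts as a donation". This mirrors steps 4-5 of the handler.
def _split_manual_payment_cents(amount_cents: int, minimum_price_cents: int) -> tuple:
    """Return (base_subscription_cents, donation_cents) for a manual payment."""
    base = minimum_price_cents
    # The endpoint rejects amounts below the plan minimum, so the donation is never negative.
    donation = max(amount_cents - base, 0)
    return base, donation
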
@api_router.put("/admin/users/{user_id}/reset-password")
|
|
async def admin_reset_user_password(
|
|
user_id: str,
|
|
request: AdminPasswordUpdateRequest,
|
|
current_user: User = Depends(require_permission("users.reset_password")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Admin resets user password - generates temp password and emails it"""
|
|
user = db.query(User).filter(User.id == user_id).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
# Generate random temporary password
|
|
temp_password = secrets.token_urlsafe(12)
|
|
|
|
# Update user
|
|
user.password_hash = get_password_hash(temp_password)
|
|
user.force_password_change = request.force_change
|
|
db.commit()
|
|
|
|
# Email user the temporary password
|
|
await send_admin_password_reset_email(
|
|
user.email,
|
|
user.first_name,
|
|
temp_password,
|
|
request.force_change
|
|
)
|
|
|
|
# Log admin action
|
|
logger.info(
|
|
f"Admin {current_user.email} reset password for user {user.email} "
|
|
f"(force_change={request.force_change})"
|
|
)
|
|
|
|
return {"message": f"Password reset for {user.email}. Temporary password emailed."}
|
|
|
|
@api_router.put("/admin/users/{user_id}/role")
|
|
async def change_user_role(
|
|
user_id: str,
|
|
request: ChangeRoleRequest,
|
|
current_user: User = Depends(require_permission("users.edit")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Change an existing user's role with privilege escalation prevention.
|
|
|
|
Requires: users.edit permission
|
|
|
|
Rules:
|
|
- Superadmin: Can assign any role (including superadmin)
|
|
- Admin: Can assign admin, finance, member, guest, and non-elevated custom roles
|
|
- Admin CANNOT assign: superadmin or custom roles with elevated permissions
|
|
- Users CANNOT change their own role
|
|
"""
|
|
|
|
# 1. Fetch target user
|
|
target_user = db.query(User).filter(User.id == user_id).first()
|
|
if not target_user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
# 2. Prevent self-role-change
|
|
if str(target_user.id) == str(current_user.id):
|
|
raise HTTPException(
|
|
status_code=403,
|
|
detail="You cannot change your own role"
|
|
)
|
|
|
|
# 3. Validate new role
|
|
if request.role not in ['guest', 'member', 'admin', 'finance', 'superadmin']:
|
|
raise HTTPException(status_code=400, detail="Invalid role")
|
|
|
|
# 4. Privilege escalation check
|
|
if current_user.role != 'superadmin':
|
|
# Non-superadmin cannot assign superadmin role
|
|
if request.role == 'superadmin':
|
|
raise HTTPException(
|
|
status_code=403,
|
|
detail="Only superadmin can assign superadmin role"
|
|
)
|
|
|
|
# Check custom role elevation
|
|
if request.role_id:
|
|
custom_role = db.query(Role).filter(Role.id == request.role_id).first()
|
|
if not custom_role:
|
|
raise HTTPException(status_code=404, detail="Custom role not found")
|
|
|
|
# Check if custom role has elevated permissions
|
|
elevated_permissions = ['users.delete', 'roles.create', 'roles.edit',
|
|
'roles.delete', 'permissions.edit']
|
|
role_perms = db.query(Permission.name).join(RolePermission).filter(
|
|
RolePermission.role_id == custom_role.id,
|
|
Permission.name.in_(elevated_permissions)
|
|
).all()
|
|
|
|
if role_perms:
|
|
raise HTTPException(
|
|
status_code=403,
|
|
detail=f"Cannot assign role with elevated permissions: {custom_role.name}"
|
|
)
|
|
|
|
# 5. Update role with audit trail
|
|
old_role = target_user.role
|
|
old_role_id = target_user.role_id
|
|
|
|
target_user.role = request.role
|
|
target_user.role_id = request.role_id if request.role_id else None
|
|
target_user.role_changed_at = datetime.now(timezone.utc)
|
|
target_user.role_changed_by = current_user.id
|
|
target_user.updated_at = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
db.refresh(target_user)
|
|
|
|
# Log admin action
|
|
logger.info(
|
|
f"Admin {current_user.email} changed role for user {target_user.email} "
|
|
f"from {old_role} to {request.role}"
|
|
)
|
|
|
|
return {
|
|
"message": f"Role changed from {old_role} to {request.role}",
|
|
"user": {
|
|
"id": str(target_user.id),
|
|
"email": target_user.email,
|
|
"name": f"{target_user.first_name} {target_user.last_name}",
|
|
"old_role": old_role,
|
|
"new_role": target_user.role,
|
|
"changed_by": f"{current_user.first_name} {current_user.last_name}",
|
|
"changed_at": target_user.role_changed_at.isoformat()
|
|
}
|
|
}
|
|
|
|
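# Illustrative example (field names come from the handler above; the values are
# hypothetical): an admin moving a member into the finance role would
# PUT /api/admin/users/{user_id}/role with a body such as
#   {"role": "finance", "role_id": null}
# while a non-superadmin caller sending {"role": "superadmin"} receives a 403.
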
@api_router.post("/admin/users/{user_id}/resend-verification")
|
|
async def admin_resend_verification(
|
|
user_id: str,
|
|
current_user: User = Depends(require_permission("users.resend_verification")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Admin resends verification email for any user"""
|
|
|
|
user = db.query(User).filter(User.id == user_id).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
# Check if email already verified
|
|
if user.email_verified:
|
|
raise HTTPException(status_code=400, detail="User's email is already verified")
|
|
|
|
# Generate new token
|
|
verification_token = secrets.token_urlsafe(32)
|
|
user.email_verification_token = verification_token
|
|
db.commit()
|
|
|
|
# Send verification email
|
|
await send_verification_email(user.email, verification_token)
|
|
|
|
# Log admin action
|
|
logger.info(
|
|
f"Admin {current_user.email} resent verification email to user {user.email}"
|
|
)
|
|
|
|
return {"message": f"Verification email resent to {user.email}"}
|
|
|
|
@api_router.post("/admin/users/{user_id}/upload-photo")
|
|
async def admin_upload_user_profile_photo(
|
|
user_id: str,
|
|
file: UploadFile = File(...),
|
|
current_user: User = Depends(require_permission("users.edit")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Admin uploads profile photo for a specific user"""
|
|
user = db.query(User).filter(User.id == user_id).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
r2 = get_r2_storage()
|
|
|
|
# Get storage quota
|
|
storage = db.query(StorageUsage).first()
|
|
if not storage:
|
|
storage = StorageUsage(
|
|
total_bytes_used=0,
|
|
max_bytes_allowed=int(os.getenv('MAX_STORAGE_BYTES', 10737418240))
|
|
)
|
|
db.add(storage)
|
|
db.commit()
|
|
db.refresh(storage)
|
|
|
|
# Get max file size from env
|
|
max_file_size = int(os.getenv('MAX_FILE_SIZE_BYTES', 52428800))
|
|
|
|
# Delete old profile photo if exists
|
|
if user.profile_photo_url:
|
|
old_key = user.profile_photo_url.split('/')[-1]
|
|
old_key = f"profiles/{old_key}"
|
|
try:
|
|
old_size = await r2.get_file_size(old_key)
|
|
await r2.delete_file(old_key)
|
|
storage.total_bytes_used -= old_size
|
|
except:
|
|
pass
|
|
|
|
# Upload new photo
|
|
try:
|
|
public_url, object_key, file_size = await r2.upload_file(
|
|
file=file,
|
|
folder="profiles",
|
|
max_size_bytes=max_file_size
|
|
)
|
|
|
|
user.profile_photo_url = public_url
|
|
storage.total_bytes_used += file_size
|
|
storage.last_updated = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
|
|
logger.info(f"Admin {current_user.email} uploaded profile photo for user {user.email}: {file_size} bytes")
|
|
|
|
return {
|
|
"message": "Profile photo uploaded successfully",
|
|
"profile_photo_url": public_url
|
|
}
|
|
except Exception as e:
|
|
db.rollback()
|
|
raise HTTPException(status_code=500, detail=f"Upload failed: {str(e)}")
|
|
|
|
@api_router.delete("/admin/users/{user_id}/delete-photo")
|
|
async def admin_delete_user_profile_photo(
|
|
user_id: str,
|
|
current_user: User = Depends(require_permission("users.edit")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Admin deletes profile photo for a specific user"""
|
|
user = db.query(User).filter(User.id == user_id).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
if not user.profile_photo_url:
|
|
raise HTTPException(status_code=404, detail="User has no profile photo")
|
|
|
|
r2 = get_r2_storage()
|
|
|
|
# Extract object key from URL
|
|
object_key = user.profile_photo_url.split('/')[-1]
|
|
object_key = f"profiles/{object_key}"
|
|
|
|
try:
|
|
# Get file size before deletion for storage tracking
|
|
storage = db.query(StorageUsage).first()
|
|
if storage:
|
|
try:
|
|
file_size = await r2.get_file_size(object_key)
|
|
storage.total_bytes_used -= file_size
|
|
storage.last_updated = datetime.now(timezone.utc)
|
|
except:
|
|
pass
|
|
|
|
# Delete from R2
|
|
await r2.delete_file(object_key)
|
|
|
|
# Remove URL from user record
|
|
user.profile_photo_url = None
|
|
db.commit()
|
|
|
|
logger.info(f"Admin {current_user.email} deleted profile photo for user {user.email}")
|
|
|
|
return {"message": "Profile photo deleted successfully"}
|
|
except Exception as e:
|
|
db.rollback()
|
|
raise HTTPException(status_code=500, detail=f"Delete failed: {str(e)}")
|
|
|
|
# ============================================================
# User Creation & Invitation Endpoints
# ============================================================

@api_router.post("/admin/users/create")
async def create_user_directly(
    request: CreateUserRequest,
    current_user: User = Depends(require_permission("users.create")),
    db: Session = Depends(get_db)
):
    """
    Create user account directly (without invitation)
    Admin/Superadmin only
    """
    # Check if email already exists
    existing_user = db.query(User).filter(User.email == request.email).first()
    if existing_user:
        raise HTTPException(status_code=400, detail="Email already registered")

    # Validate role
    try:
        role_enum = UserRole[request.role]
    except KeyError:
        raise HTTPException(status_code=400, detail=f"Invalid role: {request.role}")

    # Only superadmin can create superadmin users
    if role_enum == UserRole.superadmin and current_user.role != UserRole.superadmin:
        raise HTTPException(status_code=403, detail="Only superadmin can create superadmin users")

    # Create user
    new_user = User(
        email=request.email,
        password_hash=get_password_hash(request.password),
        first_name=request.first_name,
        last_name=request.last_name,
        phone=request.phone,
        role=role_enum,
        email_verified=True,  # Admin-created users are pre-verified
        status=UserStatus.active if role_enum in [UserRole.admin, UserRole.superadmin] else UserStatus.payment_pending,

        # Optional member fields
        address=request.address or "",
        city=request.city or "",
        state=request.state or "",
        zipcode=request.zipcode or "",
        date_of_birth=request.date_of_birth or datetime.now(timezone.utc),
        member_since=request.member_since,
    )

    db.add(new_user)
    db.commit()
    db.refresh(new_user)

    logger.info(f"Admin {current_user.email} created user: {new_user.email} with role {request.role}")

    return {
        "message": "User created successfully",
        "user_id": str(new_user.id),
        "email": new_user.email,
        "role": get_user_role_code(new_user)
    }

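# Illustrative example (fields are the ones read from CreateUserRequest above; the
# values are hypothetical): POST /api/admin/users/create with
#   {"email": "new.member@example.org", "password": "<password>", "first_name": "Sam",
#    "last_name": "Lee", "phone": "+1-555-0100", "role": "member",
#    "city": "Portland", "state": "OR"}
# creates a pre-verified account in payment_pending status; admins and superadmins
# are created directly as active, per the status expression above.
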
@api_router.post("/admin/users/invite")
|
|
async def send_user_invitation(
|
|
request: InviteUserRequest,
|
|
current_user: User = Depends(require_permission("users.create")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Send email invitation to new user
|
|
Admin/Superadmin only
|
|
"""
|
|
# Check if email already exists
|
|
existing_user = db.query(User).filter(User.email == request.email).first()
|
|
if existing_user:
|
|
raise HTTPException(status_code=400, detail="Email already registered")
|
|
|
|
# Check for pending invitation
|
|
existing_invitation = db.query(UserInvitation).filter(
|
|
UserInvitation.email == request.email,
|
|
UserInvitation.status == InvitationStatus.pending
|
|
).first()
|
|
if existing_invitation:
|
|
raise HTTPException(status_code=400, detail="Pending invitation already exists for this email")
|
|
|
|
# Validate role
|
|
try:
|
|
role_enum = UserRole[request.role]
|
|
except KeyError:
|
|
raise HTTPException(status_code=400, detail=f"Invalid role: {request.role}")
|
|
|
|
# Only superadmin can invite superadmin users
|
|
if role_enum == UserRole.superadmin and current_user.role != UserRole.superadmin:
|
|
raise HTTPException(status_code=403, detail="Only superadmin can invite superadmin users")
|
|
|
|
# Generate secure token
|
|
token = secrets.token_urlsafe(32)
|
|
|
|
# Create invitation (expires in 7 days)
|
|
invitation = UserInvitation(
|
|
email=request.email,
|
|
token=token,
|
|
role=role_enum,
|
|
status=InvitationStatus.pending,
|
|
first_name=request.first_name,
|
|
last_name=request.last_name,
|
|
phone=request.phone,
|
|
invited_by=current_user.id,
|
|
expires_at=datetime.now(timezone.utc) + timedelta(days=7)
|
|
)
|
|
|
|
db.add(invitation)
|
|
db.commit()
|
|
db.refresh(invitation)
|
|
|
|
# Send invitation email
|
|
from email_service import send_invitation_email
|
|
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
|
|
invitation_url = f"{frontend_url}/accept-invitation?token={token}"
|
|
|
|
try:
|
|
await send_invitation_email(
|
|
to_email=request.email,
|
|
inviter_name=f"{current_user.first_name} {current_user.last_name}",
|
|
invitation_url=invitation_url,
|
|
role=request.role
|
|
)
|
|
except Exception as e:
|
|
logger.error(f"Failed to send invitation email: {str(e)}")
|
|
# Continue anyway - admin can resend later
|
|
|
|
logger.info(f"Admin {current_user.email} invited {request.email} as {request.role}")
|
|
|
|
return {
|
|
"message": "Invitation sent successfully",
|
|
"invitation_id": str(invitation.id),
|
|
"email": invitation.email,
|
|
"expires_at": invitation.expires_at.isoformat(),
|
|
"invitation_url": invitation_url
|
|
}
|
|
|
|
@api_router.post("/admin/users/invitations/{invitation_id}/resend")
|
|
async def resend_invitation(
|
|
invitation_id: str,
|
|
current_user: User = Depends(require_permission("users.create")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Resend invitation email (extends expiry by 7 days)
|
|
Admin/Superadmin only
|
|
"""
|
|
invitation = db.query(UserInvitation).filter(UserInvitation.id == invitation_id).first()
|
|
if not invitation:
|
|
raise HTTPException(status_code=404, detail="Invitation not found")
|
|
|
|
if invitation.status != InvitationStatus.pending:
|
|
raise HTTPException(status_code=400, detail=f"Cannot resend invitation with status: {invitation.status.value}")
|
|
|
|
# Extend expiry by 7 days from now
|
|
invitation.expires_at = datetime.now(timezone.utc) + timedelta(days=7)
|
|
db.commit()
|
|
|
|
# Resend email
|
|
from email_service import send_invitation_email
|
|
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
|
|
invitation_url = f"{frontend_url}/accept-invitation?token={invitation.token}"
|
|
|
|
try:
|
|
await send_invitation_email(
|
|
to_email=invitation.email,
|
|
inviter_name=f"{current_user.first_name} {current_user.last_name}",
|
|
invitation_url=invitation_url,
|
|
role=invitation.role.value
|
|
)
|
|
except Exception as e:
|
|
logger.error(f"Failed to resend invitation email: {str(e)}")
|
|
raise HTTPException(status_code=500, detail="Failed to send email")
|
|
|
|
logger.info(f"Admin {current_user.email} resent invitation to {invitation.email}")
|
|
|
|
return {
|
|
"message": "Invitation resent successfully",
|
|
"expires_at": invitation.expires_at.isoformat()
|
|
}
|
|
|
|
@api_router.delete("/admin/users/invitations/{invitation_id}")
|
|
async def revoke_invitation(
|
|
invitation_id: str,
|
|
current_user: User = Depends(require_permission("users.create")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Revoke pending invitation
|
|
Admin/Superadmin only
|
|
"""
|
|
invitation = db.query(UserInvitation).filter(UserInvitation.id == invitation_id).first()
|
|
if not invitation:
|
|
raise HTTPException(status_code=404, detail="Invitation not found")
|
|
|
|
if invitation.status != InvitationStatus.pending:
|
|
raise HTTPException(status_code=400, detail=f"Cannot revoke invitation with status: {invitation.status.value}")
|
|
|
|
invitation.status = InvitationStatus.revoked
|
|
db.commit()
|
|
|
|
logger.info(f"Admin {current_user.email} revoked invitation for {invitation.email}")
|
|
|
|
return {"message": "Invitation revoked successfully"}
|
|
|
|
# ============================================================
# Public Invitation Endpoints
# ============================================================

@api_router.get("/invitations/verify/{token}")
async def verify_invitation_token(
    token: str,
    db: Session = Depends(get_db)
):
    """
    Verify invitation token and return invitation details
    Public endpoint - no authentication required
    """
    invitation = db.query(UserInvitation).filter(
        UserInvitation.token == token,
        UserInvitation.status == InvitationStatus.pending
    ).first()

    if not invitation:
        raise HTTPException(status_code=404, detail="Invalid or expired invitation token")

    # Check expiry (handle timezone-naive datetime from DB)
    expires_at_aware = invitation.expires_at.replace(tzinfo=timezone.utc) if invitation.expires_at.tzinfo is None else invitation.expires_at
    if expires_at_aware < datetime.now(timezone.utc):
        invitation.status = InvitationStatus.expired
        db.commit()
        raise HTTPException(status_code=400, detail="Invitation has expired")

    return {
        "email": invitation.email,
        "role": invitation.role.value,
        "first_name": invitation.first_name,
        "last_name": invitation.last_name,
        "phone": invitation.phone,
        "expires_at": invitation.expires_at.isoformat()
    }

@api_router.post("/invitations/accept")
|
|
async def accept_invitation(
|
|
request: AcceptInvitationRequest,
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Accept invitation and create user account
|
|
Public endpoint - no authentication required
|
|
"""
|
|
# Verify invitation
|
|
invitation = db.query(UserInvitation).filter(
|
|
UserInvitation.token == request.token,
|
|
UserInvitation.status == InvitationStatus.pending
|
|
).first()
|
|
|
|
if not invitation:
|
|
raise HTTPException(status_code=404, detail="Invalid or expired invitation token")
|
|
|
|
# Check expiry (handle timezone-naive datetime from DB)
|
|
expires_at_aware = invitation.expires_at.replace(tzinfo=timezone.utc) if invitation.expires_at.tzinfo is None else invitation.expires_at
|
|
if expires_at_aware < datetime.now(timezone.utc):
|
|
invitation.status = InvitationStatus.expired
|
|
db.commit()
|
|
raise HTTPException(status_code=400, detail="Invitation has expired")
|
|
|
|
# Check if email already registered
|
|
existing_user = db.query(User).filter(User.email == invitation.email).first()
|
|
if existing_user:
|
|
raise HTTPException(status_code=400, detail="Email already registered")
|
|
|
|
# Create user account
|
|
new_user = User(
|
|
email=invitation.email,
|
|
password_hash=get_password_hash(request.password),
|
|
first_name=request.first_name,
|
|
last_name=request.last_name,
|
|
phone=request.phone,
|
|
role=invitation.role,
|
|
email_verified=True, # Invited users are pre-verified
|
|
status=UserStatus.active if invitation.role in [UserRole.admin, UserRole.superadmin] else UserStatus.payment_pending,
|
|
|
|
# Optional fields
|
|
address=request.address or "",
|
|
city=request.city or "",
|
|
state=request.state or "",
|
|
zipcode=request.zipcode or "",
|
|
date_of_birth=request.date_of_birth or datetime.now(timezone.utc),
|
|
)
|
|
|
|
db.add(new_user)
|
|
|
|
# Update invitation status
|
|
invitation.status = InvitationStatus.accepted
|
|
invitation.accepted_at = datetime.now(timezone.utc)
|
|
invitation.accepted_by = new_user.id
|
|
|
|
db.commit()
|
|
db.refresh(new_user)
|
|
|
|
# Generate JWT token for auto-login
|
|
access_token = create_access_token(data={"sub": str(new_user.id)})
|
|
|
|
logger.info(f"User {new_user.email} accepted invitation and created account with role {get_user_role_code(new_user)}")
|
|
|
|
return {
|
|
"message": "Invitation accepted successfully",
|
|
"access_token": access_token,
|
|
"token_type": "bearer",
|
|
"user": {
|
|
"id": str(new_user.id),
|
|
"email": new_user.email,
|
|
"first_name": new_user.first_name,
|
|
"last_name": new_user.last_name,
|
|
"role": get_user_role_code(new_user),
|
|
"status": new_user.status.value
|
|
}
|
|
}
|
|
|
|
|
|
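# Illustrative example (fields mirror the AcceptInvitationRequest usage above; the
# values are hypothetical): POST /api/invitations/accept with
#   {"token": "<invitation token>", "password": "<password>", "first_name": "Alex",
#    "last_name": "Kim", "phone": "+1-555-0101"}
# returns an access_token so the newly created user is logged in immediately.
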
# ============================================================
# CSV IMPORT ENDPOINTS
# Note: Export endpoint has been moved above {user_id} route
# ============================================================

@api_router.post("/admin/users/import")
async def import_users_csv(
    file: UploadFile = File(...),
    update_existing: bool = Form(False),
    current_user: User = Depends(require_permission("users.import")),
    db: Session = Depends(get_db)
):
    """
    Import users from CSV file
    Admin/Superadmin only
    Requires permission: users.import

    CSV Format:
    Email,First Name,Last Name,Phone,Role,Status,Address,City,State,Zipcode,Date of Birth,Member Since
    """
    # Validate file type
    if not file.filename.endswith('.csv'):
        raise HTTPException(status_code=400, detail="Only CSV files are supported")

    # Read file content
    try:
        contents = await file.read()
        decoded = contents.decode('utf-8')
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Failed to read CSV file: {str(e)}")

    # Parse CSV
    csv_reader = csv.DictReader(io.StringIO(decoded))

    # Validate required columns
    required_columns = {'Email', 'First Name', 'Last Name', 'Phone', 'Role'}
    if not required_columns.issubset(set(csv_reader.fieldnames or [])):
        missing = required_columns - set(csv_reader.fieldnames or [])
        raise HTTPException(
            status_code=400,
            detail=f"Missing required columns: {', '.join(missing)}"
        )

    # Count total rows
    rows = list(csv_reader)
    total_rows = len(rows)

    # Create import job
    import_job = ImportJob(
        filename=file.filename,
        total_rows=total_rows,
        imported_by=current_user.id,
        status=ImportJobStatus.processing
    )
    db.add(import_job)
    db.commit()
    db.refresh(import_job)

    # Process rows
    successful_rows = 0
    failed_rows = 0
    errors = []

    for idx, row in enumerate(rows, start=1):
        try:
            # Validate required fields
            email = row.get('Email', '').strip()
            first_name = row.get('First Name', '').strip()
            last_name = row.get('Last Name', '').strip()
            phone = row.get('Phone', '').strip()
            role_str = row.get('Role', '').strip()

            if not all([email, first_name, last_name, phone, role_str]):
                raise ValueError("Missing required fields")

            # Validate email format (basic check)
            if '@' not in email:
                raise ValueError("Invalid email format")

            # Validate role
            try:
                role_enum = UserRole[role_str.lower()]
            except KeyError:
                raise ValueError(f"Invalid role: {role_str}. Must be one of: guest, member, admin, superadmin")

            # Only superadmin can import superadmin users
            if role_enum == UserRole.superadmin and current_user.role != UserRole.superadmin:
                raise ValueError("Only superadmin can import superadmin users")

            # Check if user exists
            existing_user = db.query(User).filter(User.email == email).first()

            if existing_user:
                if update_existing:
                    # Update existing user
                    existing_user.first_name = first_name
                    existing_user.last_name = last_name
                    existing_user.phone = phone
                    set_user_role(existing_user, role_enum, db)

                    # Update optional fields if provided
                    if row.get('Address'):
                        existing_user.address = row['Address'].strip()
                    if row.get('City'):
                        existing_user.city = row['City'].strip()
                    if row.get('State'):
                        existing_user.state = row['State'].strip()
                    if row.get('Zipcode'):
                        existing_user.zipcode = row['Zipcode'].strip()
                    if row.get('Status'):
                        try:
                            existing_user.status = UserStatus[row['Status'].strip().lower()]
                        except KeyError:
                            pass  # Skip invalid status
                    if row.get('Date of Birth'):
                        try:
                            existing_user.date_of_birth = datetime.strptime(row['Date of Birth'].strip(), '%Y-%m-%d')
                        except ValueError:
                            pass  # Skip invalid date
                    if row.get('Member Since'):
                        try:
                            existing_user.member_since = datetime.strptime(row['Member Since'].strip(), '%Y-%m-%d')
                        except ValueError:
                            pass  # Skip invalid date

                    successful_rows += 1
                else:
                    # Skip duplicate
                    errors.append({
                        "row": idx,
                        "email": email,
                        "error": "Email already exists (use update_existing=true to update)"
                    })
                    failed_rows += 1
                    continue
            else:
                # Create new user
                # Generate temporary password (admin will reset it)
                temp_password = secrets.token_urlsafe(16)

                new_user = User(
                    email=email,
                    password_hash=get_password_hash(temp_password),
                    first_name=first_name,
                    last_name=last_name,
                    phone=phone,
                    role=role_enum,
                    email_verified=True,  # Imported users are pre-verified
                    status=UserStatus[row.get('Status', 'payment_pending').strip().lower()] if row.get('Status') else UserStatus.payment_pending,
                    address=row.get('Address', '').strip(),
                    city=row.get('City', '').strip(),
                    state=row.get('State', '').strip(),
                    zipcode=row.get('Zipcode', '').strip(),
                )

                # Parse optional dates
                if row.get('Date of Birth'):
                    try:
                        new_user.date_of_birth = datetime.strptime(row['Date of Birth'].strip(), '%Y-%m-%d')
                    except ValueError:
                        pass  # Use default

                if row.get('Member Since'):
                    try:
                        new_user.member_since = datetime.strptime(row['Member Since'].strip(), '%Y-%m-%d')
                    except ValueError:
                        pass  # Leave as None

                db.add(new_user)
                successful_rows += 1

            # Commit every 50 rows for performance
            if idx % 50 == 0:
                db.commit()

        except Exception as e:
            failed_rows += 1
            errors.append({
                "row": idx,
                "email": row.get('Email', 'N/A'),
                "error": str(e)
            })
            continue

    # Final commit
    db.commit()

    # Update import job
    import_job.processed_rows = total_rows
    import_job.successful_rows = successful_rows
    import_job.failed_rows = failed_rows
    import_job.errors = errors
    import_job.completed_at = datetime.now(timezone.utc)

    if failed_rows == 0:
        import_job.status = ImportJobStatus.completed
    elif successful_rows == 0:
        import_job.status = ImportJobStatus.failed
    else:
        import_job.status = ImportJobStatus.partial

    db.commit()
    db.refresh(import_job)

    logger.info(f"Admin {current_user.email} imported {successful_rows}/{total_rows} users from CSV")

    return {
        "message": "Import completed",
        "import_job_id": str(import_job.id),
        "total_rows": total_rows,
        "successful_rows": successful_rows,
        "failed_rows": failed_rows,
        "status": import_job.status.value,
        "errors": errors[:10]  # Return first 10 errors only (full list available in job details)
    }


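# Illustrative sample input (the header line is taken from the docstring above; the
# data rows are hypothetical) for POST /api/admin/users/import:
#
#   Email,First Name,Last Name,Phone,Role,Status,Address,City,State,Zipcode,Date of Birth,Member Since
#   jane@example.org,Jane,Doe,+1-555-0102,member,active,123 Main St,Springfield,IL,62701,1985-04-12,2020-01-01
#   bob@example.org,Bob,Ray,+1-555-0103,guest,,,,,,,
#
# Dates must be YYYY-MM-DD; unknown Status values and unparsable dates are skipped,
# as handled in the row loop above.
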
@api_router.get("/admin/users/import-jobs")
|
|
async def get_import_jobs(
|
|
status: Optional[str] = None,
|
|
current_user: User = Depends(require_permission("users.view")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
List all import jobs with optional status filter
|
|
Admin/Superadmin only
|
|
Requires permission: users.view
|
|
"""
|
|
query = db.query(ImportJob)
|
|
|
|
if status:
|
|
try:
|
|
status_enum = ImportJobStatus[status]
|
|
query = query.filter(ImportJob.status == status_enum)
|
|
except KeyError:
|
|
raise HTTPException(status_code=400, detail=f"Invalid status: {status}")
|
|
|
|
jobs = query.order_by(ImportJob.started_at.desc()).all()
|
|
|
|
return [
|
|
{
|
|
"id": str(job.id),
|
|
"filename": job.filename,
|
|
"total_rows": job.total_rows,
|
|
"processed_rows": job.processed_rows,
|
|
"successful_rows": job.successful_rows,
|
|
"failed_rows": job.failed_rows,
|
|
"status": job.status.value,
|
|
"imported_by": str(job.imported_by),
|
|
"started_at": job.started_at.isoformat(),
|
|
"completed_at": job.completed_at.isoformat() if job.completed_at else None,
|
|
"error_count": len(job.errors) if job.errors else 0
|
|
}
|
|
for job in jobs
|
|
]
|
|
|
|
|
|
@api_router.get("/admin/users/import-jobs/{job_id}")
|
|
async def get_import_job_details(
|
|
job_id: str,
|
|
current_user: User = Depends(require_permission("users.view")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get detailed information about a specific import job
|
|
Admin/Superadmin only
|
|
Requires permission: users.view
|
|
"""
|
|
job = db.query(ImportJob).filter(ImportJob.id == job_id).first()
|
|
if not job:
|
|
raise HTTPException(status_code=404, detail="Import job not found")
|
|
|
|
# Get importer details
|
|
importer = db.query(User).filter(User.id == job.imported_by).first()
|
|
|
|
return {
|
|
"id": str(job.id),
|
|
"filename": job.filename,
|
|
"total_rows": job.total_rows,
|
|
"processed_rows": job.processed_rows,
|
|
"successful_rows": job.successful_rows,
|
|
"failed_rows": job.failed_rows,
|
|
"status": job.status.value,
|
|
"imported_by": {
|
|
"id": str(importer.id),
|
|
"email": importer.email,
|
|
"name": f"{importer.first_name} {importer.last_name}"
|
|
} if importer else None,
|
|
"started_at": job.started_at.isoformat(),
|
|
"completed_at": job.completed_at.isoformat() if job.completed_at else None,
|
|
"errors": job.errors or [] # Full error list
|
|
}
|
|
|
|
|
|
# ============================================================================
# WordPress CSV Import Endpoints
# ============================================================================

@api_router.post("/admin/import/upload-csv")
async def upload_wordpress_csv(
    file: UploadFile = File(...),
    current_user: User = Depends(require_permission("users.import")),
    db: Session = Depends(get_db)
):
    """
    Upload WordPress CSV, parse, and generate status suggestions.

    This endpoint:
    1. Validates the CSV file
    2. Optionally uploads to R2 storage (currently skipped; see note below)
    3. Parses WordPress data (PHP serialized roles, etc.)
    4. Generates smart status suggestions
    5. Creates ImportJob record with status='preview_ready'
    6. Stores preview data in wordpress_metadata field

    Returns:
        Import job summary with data quality metrics

    Requires permission: users.import
    """
    # Validate file type
    if not file.filename.endswith('.csv'):
        raise HTTPException(status_code=400, detail="Only CSV files are supported")

    # Validate file size (10MB max)
    MAX_FILE_SIZE = 10 * 1024 * 1024  # 10MB
    contents = await file.read()
    if len(contents) > MAX_FILE_SIZE:
        raise HTTPException(status_code=400, detail="File size exceeds 10MB limit")

    # Save to temporary file for parsing
    import tempfile
    with tempfile.NamedTemporaryFile(mode='wb', delete=False, suffix='.csv') as tmp:
        tmp.write(contents)
        tmp_path = tmp.name

    try:
        # Fetch existing emails from database to check for duplicates
        existing_emails = set(
            email.lower() for (email,) in db.query(User.email).all()
        )
        logger.info(f"Checking against {len(existing_emails)} existing emails in database")

        # Parse CSV with WordPress parser
        analysis_result = analyze_csv(tmp_path, existing_emails=existing_emails)

        # Note: File contents stored in wordpress_metadata, R2 upload optional
        # Could implement R2 upload later if needed for archival purposes

        # Create ImportJob record
        import_job = ImportJob(
            filename=file.filename,
            file_key=None,  # Optional: could add R2 upload later
            total_rows=analysis_result['total_rows'],
            processed_rows=0,
            successful_rows=0,
            failed_rows=0,
            status=ImportJobStatus.preview_ready,
            wordpress_metadata={
                'preview_data': analysis_result['preview_data'],
                'data_quality': analysis_result['data_quality'],
                'valid_rows': analysis_result['valid_rows'],
                'warnings': analysis_result['warnings'],
                'errors': analysis_result['errors']
            },
            imported_by=current_user.id
        )

        db.add(import_job)
        db.commit()
        db.refresh(import_job)

        logger.info(f"WordPress CSV uploaded: {import_job.id} by {current_user.email}")

        return {
            'import_job_id': str(import_job.id),
            'total_rows': analysis_result['total_rows'],
            'valid_rows': analysis_result['valid_rows'],
            'warnings': analysis_result['warnings'],
            'errors': analysis_result['errors'],
            'data_quality': analysis_result['data_quality']
        }

    except Exception as e:
        logger.error(f"Failed to upload WordPress CSV: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to process CSV: {str(e)}")

    finally:
        # Clean up temp file
        if os.path.exists(tmp_path):
            os.unlink(tmp_path)


@api_router.get("/admin/import/{job_id}/preview")
|
|
async def get_import_preview(
|
|
job_id: str,
|
|
page: int = 1,
|
|
page_size: int = 50,
|
|
current_user: User = Depends(require_permission("users.view")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get paginated preview data for WordPress import status review.
|
|
|
|
Returns preview data with suggested status mappings that admins
|
|
can review and override before executing the import.
|
|
|
|
Args:
|
|
job_id: Import job UUID
|
|
page: Page number (1-indexed)
|
|
page_size: Number of rows per page (default 50)
|
|
|
|
Returns:
|
|
Paginated preview data with status suggestions and warnings
|
|
|
|
Requires permission: users.view
|
|
"""
|
|
# Get import job
|
|
job = db.query(ImportJob).filter(ImportJob.id == job_id).first()
|
|
if not job:
|
|
raise HTTPException(status_code=404, detail="Import job not found")
|
|
|
|
# Verify job is in preview_ready status
|
|
if job.status != ImportJobStatus.preview_ready:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"Import job is not in preview_ready status (current: {job.status.value})"
|
|
)
|
|
|
|
# Get preview data from wordpress_metadata
|
|
preview_data = job.wordpress_metadata.get('preview_data', [])
|
|
|
|
# Format for paginated display
|
|
paginated = format_preview_for_display(preview_data, page, page_size)
|
|
|
|
return paginated
|
|
|
|
|
|
@api_router.post("/admin/import/{job_id}/execute")
|
|
async def execute_wordpress_import(
|
|
job_id: str,
|
|
overrides: dict = {},
|
|
options: dict = {},
|
|
current_user: User = Depends(require_permission("users.import")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Execute WordPress import with admin status overrides.
|
|
|
|
Process:
|
|
1. Merge status overrides with suggested mappings
|
|
2. Create users in batches (commit every 20 rows)
|
|
3. Track imported_user_ids for rollback capability
|
|
4. Queue password reset emails (async)
|
|
5. Update import job status
|
|
|
|
Args:
|
|
job_id: Import job UUID
|
|
overrides: Dict mapping row_number to status override
|
|
e.g., {'1': {'status': 'active'}, '5': {'status': 'inactive'}}
|
|
options: Import options
|
|
- send_password_emails: bool (default True)
|
|
- skip_errors: bool (default True)
|
|
|
|
Returns:
|
|
Import results with success/failure counts
|
|
|
|
Requires permission: users.import
|
|
"""
|
|
# Get import job
|
|
job = db.query(ImportJob).filter(ImportJob.id == job_id).first()
|
|
if not job:
|
|
raise HTTPException(status_code=404, detail="Import job not found")
|
|
|
|
# Verify job is in preview_ready status
|
|
if job.status != ImportJobStatus.preview_ready:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"Import job is not in preview_ready status (current: {job.status.value})"
|
|
)
|
|
|
|
# Update job status to processing
|
|
job.status = ImportJobStatus.processing
|
|
db.commit()
|
|
|
|
# Get preview data
|
|
preview_data = job.wordpress_metadata.get('preview_data', [])
|
|
|
|
# Import configuration
|
|
send_password_emails = options.get('send_password_emails', True)
|
|
skip_errors = options.get('skip_errors', True)
|
|
|
|
# Track results
|
|
imported_user_ids = []
|
|
successful_rows = 0
|
|
failed_rows = 0
|
|
errors = []
|
|
|
|
# Generate default password for all imported users
|
|
default_password_hash = get_password_hash(secrets.token_urlsafe(32))
|
|
|
|
try:
|
|
# Process each row
|
|
for idx, row_data in enumerate(preview_data):
|
|
row_num = row_data['row_number']
|
|
|
|
try:
|
|
# Skip rows with critical errors
|
|
if row_data.get('errors') and skip_errors:
|
|
failed_rows += 1
|
|
errors.append({
|
|
'row': row_num,
|
|
'email': row_data.get('email'),
|
|
'error': ', '.join(row_data['errors'])
|
|
})
|
|
continue
|
|
|
|
# Apply status override if provided
|
|
final_status = row_data['suggested_status']
|
|
if str(row_num) in overrides:
|
|
final_status = overrides[str(row_num)].get('status', final_status)
|
|
|
|
# Check if user already exists
|
|
existing_user = db.query(User).filter(User.email == row_data['email']).first()
|
|
if existing_user:
|
|
failed_rows += 1
|
|
errors.append({
|
|
'row': row_num,
|
|
'email': row_data['email'],
|
|
'error': 'User with this email already exists'
|
|
})
|
|
continue
|
|
|
|
# Create user
|
|
new_user = User(
|
|
email=row_data['email'],
|
|
password_hash=default_password_hash,
|
|
first_name=row_data.get('first_name', ''),
|
|
last_name=row_data.get('last_name', ''),
|
|
phone=row_data.get('phone'),
|
|
address='', # WordPress CSV doesn't have address data
|
|
city='',
|
|
state='',
|
|
zipcode='',
|
|
date_of_birth=row_data.get('date_of_birth'),
|
|
status=UserStatus[final_status],
|
|
role=UserRole[row_data['suggested_role']],
|
|
newsletter_subscribed=row_data.get('newsletter_consent', False),
|
|
email_verified=True, # WordPress users are pre-verified
|
|
import_source='wordpress',
|
|
import_job_id=job.id,
|
|
wordpress_user_id=row_data.get('wordpress_user_id'),
|
|
wordpress_registered_date=row_data.get('wordpress_registered')
|
|
)
|
|
|
|
db.add(new_user)
|
|
db.flush() # Flush to get the ID without committing
|
|
imported_user_ids.append(str(new_user.id))
|
|
successful_rows += 1
|
|
|
|
# Commit in batches of 20
|
|
if (idx + 1) % 20 == 0:
|
|
db.commit()
|
|
job.processed_rows = idx + 1
|
|
db.commit()
|
|
|
|
except Exception as e:
|
|
logger.error(f"Failed to import row {row_num}: {str(e)}")
|
|
failed_rows += 1
|
|
errors.append({
|
|
'row': row_num,
|
|
'email': row_data.get('email', ''),
|
|
'error': str(e)
|
|
})
|
|
if not skip_errors:
|
|
db.rollback()
|
|
raise HTTPException(status_code=500, detail=f"Import failed at row {row_num}: {str(e)}")
|
|
|
|
# Final commit
|
|
db.commit()
|
|
|
|
# Update import job
|
|
job.processed_rows = len(preview_data)
|
|
job.successful_rows = successful_rows
|
|
job.failed_rows = failed_rows
|
|
job.status = ImportJobStatus.completed if failed_rows == 0 else ImportJobStatus.partial
|
|
job.imported_user_ids = imported_user_ids
|
|
job.error_log = errors
|
|
job.completed_at = datetime.now(timezone.utc)
|
|
db.commit()
|
|
|
|
# Queue password reset emails (async, non-blocking)
|
|
password_emails_queued = 0
|
|
if send_password_emails and imported_user_ids:
|
|
try:
|
|
for user_id_str in imported_user_ids:
|
|
try:
|
|
# Convert to UUID and fetch user
|
|
user_uuid = uuid.UUID(user_id_str)
|
|
user = db.query(User).filter(User.id == user_uuid).first()
|
|
|
|
if user:
|
|
# Generate password reset token
|
|
reset_token = create_password_reset_token(user.email)
|
|
reset_url = f"{os.getenv('FRONTEND_URL')}/reset-password?token={reset_token}"
|
|
|
|
# Send email (async)
|
|
await send_password_reset_email(user.email, user.first_name, reset_url)
|
|
password_emails_queued += 1
|
|
except (ValueError, AttributeError) as e:
|
|
logger.warning(f"Skipping invalid user ID: {user_id_str}")
|
|
continue
|
|
except Exception as e:
|
|
logger.error(f"Failed to send password reset emails: {str(e)}")
|
|
# Don't fail import if emails fail
|
|
|
|
logger.info(f"Import executed: {job.id} - {successful_rows}/{len(preview_data)} by {current_user.email}")
|
|
|
|
return {
|
|
'successful_rows': successful_rows,
|
|
'failed_rows': failed_rows,
|
|
'imported_user_ids': imported_user_ids,
|
|
'password_emails_queued': password_emails_queued,
|
|
'errors': errors
|
|
}
|
|
|
|
except Exception as e:
|
|
db.rollback()
|
|
job.status = ImportJobStatus.failed
|
|
job.error_log = [{'error': str(e)}]
|
|
db.commit()
|
|
logger.error(f"Import execution failed: {str(e)}")
|
|
raise HTTPException(status_code=500, detail=f"Import execution failed: {str(e)}")
|
|
|
|
|
|
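# Illustrative request body (structure taken from the docstring above; the row
# numbers and statuses are hypothetical) for POST /api/admin/import/{job_id}/execute:
#
#   {
#       "overrides": {"1": {"status": "active"}, "5": {"status": "inactive"}},
#       "options": {"send_password_emails": true, "skip_errors": true}
#   }
#
# Rows without an override keep the suggested_status computed during preview.
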
@api_router.post("/admin/import/{job_id}/rollback")
|
|
async def rollback_import_job(
|
|
job_id: str,
|
|
confirm: bool = False,
|
|
current_user: User = Depends(require_permission("users.import")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Delete all users from a specific import job (full rollback).
|
|
|
|
Safety checks:
|
|
- Requires confirm=True parameter
|
|
- Verifies job status is completed or partial
|
|
- Cannot rollback twice (checks rollback_at is None)
|
|
- Logs action to import_rollback_audit table
|
|
|
|
Args:
|
|
job_id: Import job UUID
|
|
confirm: Must be True to execute rollback
|
|
|
|
Returns:
|
|
Number of deleted users and confirmation message
|
|
|
|
Requires permission: users.import
|
|
"""
|
|
# Safety check: require explicit confirmation
|
|
if not confirm:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail="Rollback requires confirm=true parameter"
|
|
)
|
|
|
|
# Get import job
|
|
job = db.query(ImportJob).filter(ImportJob.id == job_id).first()
|
|
if not job:
|
|
raise HTTPException(status_code=404, detail="Import job not found")
|
|
|
|
# Verify job can be rolled back
|
|
if job.status not in [ImportJobStatus.completed, ImportJobStatus.partial]:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"Cannot rollback import with status: {job.status.value}"
|
|
)
|
|
|
|
if job.rollback_at:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail="Import has already been rolled back"
|
|
)
|
|
|
|
# Get imported user IDs
|
|
imported_user_ids = job.imported_user_ids or []
|
|
if not imported_user_ids:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail="No users to rollback (imported_user_ids is empty)"
|
|
)
|
|
|
|
try:
|
|
# Delete all imported users
|
|
deleted_count = db.query(User).filter(
|
|
User.id.in_([uuid.UUID(uid) for uid in imported_user_ids])
|
|
).delete(synchronize_session=False)
|
|
|
|
# Update import job
|
|
job.status = ImportJobStatus.rolled_back
|
|
job.rollback_at = datetime.now(timezone.utc)
|
|
job.rollback_by = current_user.id
|
|
|
|
# Create audit record
|
|
from models import ImportRollbackAudit
|
|
audit = ImportRollbackAudit(
|
|
import_job_id=job.id,
|
|
rolled_back_by=current_user.id,
|
|
deleted_user_count=deleted_count,
|
|
deleted_user_ids=imported_user_ids,
|
|
reason="Manual rollback by admin"
|
|
)
|
|
db.add(audit)
|
|
|
|
db.commit()
|
|
|
|
logger.warning(f"Import rolled back: {job.id} - {deleted_count} users deleted by {current_user.email}")
|
|
|
|
return {
|
|
'deleted_users': deleted_count,
|
|
'message': f'Import rolled back successfully. {deleted_count} users deleted.'
|
|
}
|
|
|
|
except Exception as e:
|
|
db.rollback()
|
|
logger.error(f"Rollback failed for job {job.id}: {str(e)}")
|
|
raise HTTPException(status_code=500, detail=f"Rollback failed: {str(e)}")
|
|
|
|
|
|
@api_router.get("/admin/import/{job_id}/status")
|
|
async def get_import_status(
|
|
job_id: str,
|
|
current_user: User = Depends(require_permission("users.view")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get real-time import progress status for polling.
|
|
|
|
Use this endpoint to poll for import progress updates
|
|
while the import is executing.
|
|
|
|
Args:
|
|
job_id: Import job UUID
|
|
|
|
Returns:
|
|
Current import status with progress percentage
|
|
|
|
Requires permission: users.view
|
|
"""
|
|
job = db.query(ImportJob).filter(ImportJob.id == job_id).first()
|
|
if not job:
|
|
raise HTTPException(status_code=404, detail="Import job not found")
|
|
|
|
progress_percent = 0.0
|
|
if job.total_rows > 0:
|
|
progress_percent = (job.processed_rows / job.total_rows) * 100
|
|
|
|
return {
|
|
'status': job.status.value,
|
|
'processed_rows': job.processed_rows,
|
|
'total_rows': job.total_rows,
|
|
'progress_percent': round(progress_percent, 1),
|
|
'successful_rows': job.successful_rows,
|
|
'failed_rows': job.failed_rows
|
|
}
|
|
|
|
|
|
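# Illustrative client-side polling sketch (hypothetical client code; only the
# endpoint path and response fields come from the handler above, and the use of
# httpx is an assumption - any HTTP client works):
#
#   import time, httpx
#   while True:
#       r = httpx.get(f"{API}/api/admin/import/{job_id}/status", headers=auth).json()
#       print(f"{r['progress_percent']}% ({r['processed_rows']}/{r['total_rows']})")
#       if r['status'] not in ('processing', 'preview_ready'):
#           break
#       time.sleep(2)
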
@api_router.get("/admin/import/{job_id}/errors/download")
|
|
async def download_error_report(
|
|
job_id: str,
|
|
current_user: User = Depends(require_permission("users.view")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Download CSV report with all import errors.
|
|
|
|
CSV columns: Row Number, Email, Error Type, Error Message, Original Data
|
|
|
|
Args:
|
|
job_id: Import job UUID
|
|
|
|
Returns:
|
|
StreamingResponse with CSV file
|
|
|
|
Requires permission: users.view
|
|
"""
|
|
job = db.query(ImportJob).filter(ImportJob.id == job_id).first()
|
|
if not job:
|
|
raise HTTPException(status_code=404, detail="Import job not found")
|
|
|
|
errors = job.error_log or []
|
|
if not errors:
|
|
raise HTTPException(status_code=404, detail="No errors found for this import job")
|
|
|
|
# Generate CSV
|
|
output = io.StringIO()
|
|
writer = csv.DictWriter(output, fieldnames=['Row Number', 'Email', 'Error Type', 'Error Message'])
|
|
writer.writeheader()
|
|
|
|
for error in errors:
|
|
writer.writerow({
|
|
'Row Number': error.get('row', ''),
|
|
'Email': error.get('email', ''),
|
|
'Error Type': 'Import Error',
|
|
'Error Message': error.get('error', '')
|
|
})
|
|
|
|
# Return as streaming response
|
|
output.seek(0)
|
|
return StreamingResponse(
|
|
iter([output.getvalue()]),
|
|
media_type="text/csv",
|
|
headers={"Content-Disposition": f"attachment; filename=import_errors_{job_id}.csv"}
|
|
)
|
|
|
|
|
|
@api_router.post("/admin/events", response_model=EventResponse)
|
|
async def create_event(
|
|
request: EventCreate,
|
|
current_user: User = Depends(require_permission("events.create")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
event = Event(
|
|
title=request.title,
|
|
description=request.description,
|
|
start_at=request.start_at,
|
|
end_at=request.end_at,
|
|
location=request.location,
|
|
capacity=request.capacity,
|
|
published=request.published,
|
|
created_by=current_user.id
|
|
)
|
|
|
|
db.add(event)
|
|
db.commit()
|
|
db.refresh(event)
|
|
|
|
logger.info(f"Event created: {event.title} by {current_user.email}")
|
|
|
|
return EventResponse(
|
|
id=str(event.id),
|
|
title=event.title,
|
|
description=event.description,
|
|
start_at=event.start_at,
|
|
end_at=event.end_at,
|
|
location=event.location,
|
|
capacity=event.capacity,
|
|
published=event.published,
|
|
created_by=str(event.created_by),
|
|
created_at=event.created_at,
|
|
rsvp_count=0
|
|
)
|
|
|
|
@api_router.put("/admin/events/{event_id}")
|
|
async def update_event(
|
|
event_id: str,
|
|
request: EventUpdate,
|
|
current_user: User = Depends(require_permission("events.edit")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
event = db.query(Event).filter(Event.id == event_id).first()
|
|
if not event:
|
|
raise HTTPException(status_code=404, detail="Event not found")
|
|
|
|
if request.title:
|
|
event.title = request.title
|
|
if request.description is not None:
|
|
event.description = request.description
|
|
if request.start_at:
|
|
event.start_at = request.start_at
|
|
if request.end_at:
|
|
event.end_at = request.end_at
|
|
if request.location:
|
|
event.location = request.location
|
|
if request.capacity is not None:
|
|
event.capacity = request.capacity
|
|
if request.published is not None:
|
|
event.published = request.published
|
|
|
|
event.updated_at = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
db.refresh(event)
|
|
|
|
return {"message": "Event updated successfully"}
|
|
|
|
@api_router.get("/admin/events/{event_id}", response_model=EventResponse)
async def get_admin_event(
    event_id: str,
    current_user: User = Depends(require_permission("events.view")),
    db: Session = Depends(get_db)
):
    """Get single event details (admin) - allows viewing unpublished events"""
    event = db.query(Event).filter(Event.id == event_id).first()
    if not event:
        raise HTTPException(status_code=404, detail="Event not found")

    rsvp_count = db.query(EventRSVP).filter(
        EventRSVP.event_id == event.id,
        EventRSVP.rsvp_status == RSVPStatus.yes
    ).count()

    return EventResponse(
        id=str(event.id),
        title=event.title,
        description=event.description,
        start_at=event.start_at,
        end_at=event.end_at,
        location=event.location,
        capacity=event.capacity,
        published=event.published,
        created_by=str(event.created_by),
        created_at=event.created_at,
        rsvp_count=rsvp_count,
        user_rsvp_status=None
    )

@api_router.get("/admin/events/{event_id}/rsvps")
async def get_event_rsvps(
    event_id: str,
    current_user: User = Depends(require_permission("events.rsvps")),
    db: Session = Depends(get_db)
):
    event = db.query(Event).filter(Event.id == event_id).first()
    if not event:
        raise HTTPException(status_code=404, detail="Event not found")

    rsvps = db.query(EventRSVP).filter(EventRSVP.event_id == event_id).all()

    result = []
    for rsvp in rsvps:
        user = db.query(User).filter(User.id == rsvp.user_id).first()
        result.append({
            "id": str(rsvp.id),
            "user_id": str(rsvp.user_id),
            "user_name": f"{user.first_name} {user.last_name}",
            "user_email": user.email,
            "rsvp_status": rsvp.rsvp_status.value,
            "attended": rsvp.attended,
            "attended_at": rsvp.attended_at.isoformat() if rsvp.attended_at else None
        })

    return result

@api_router.put("/admin/events/{event_id}/attendance")
async def mark_attendance(
    event_id: str,
    request: BatchAttendanceUpdate,
    current_user: User = Depends(require_permission("events.attendance")),
    db: Session = Depends(get_db)
):
    """Mark attendance for one or more users (supports batch updates)"""
    event = db.query(Event).filter(Event.id == event_id).first()
    if not event:
        raise HTTPException(status_code=404, detail="Event not found")

    updated_count = 0

    # Process each update in the batch
    for update in request.updates:
        rsvp = db.query(EventRSVP).filter(
            EventRSVP.event_id == event_id,
            EventRSVP.user_id == update.user_id
        ).first()

        # Auto-create RSVP if it doesn't exist (for retroactive attendance marking)
        if not rsvp:
            rsvp = EventRSVP(
                event_id=event_id,
                user_id=update.user_id,
                rsvp_status=RSVPStatus.yes,  # Default to 'yes' for attended events
                attended=False,
                created_at=datetime.now(timezone.utc),
                updated_at=datetime.now(timezone.utc)
            )
            db.add(rsvp)
            db.flush()  # Get the ID without committing

        rsvp.attended = update.attended
        rsvp.attended_at = datetime.now(timezone.utc) if update.attended else None
        rsvp.updated_at = datetime.now(timezone.utc)

        # If user attended and they were pending validation, update their status
        if update.attended:
            user = db.query(User).filter(User.id == update.user_id).first()
            if user and user.status == UserStatus.pending_validation:
                user.status = UserStatus.pre_validated
                user.updated_at = datetime.now(timezone.utc)

        updated_count += 1

    db.commit()

    return {"message": f"Attendance marked successfully for {updated_count} {'person' if updated_count == 1 else 'people'}"}

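# Example batch attendance payload (shape inferred from request.updates / update.user_id /
# update.attended above; the exact schema lives in the BatchAttendanceUpdate model):
# {"updates": [{"user_id": "3f2b...", "attended": true}, {"user_id": "9a1c...", "attended": false}]}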
@api_router.get("/admin/events")
async def get_admin_events(
    current_user: User = Depends(require_permission("events.view")),
    db: Session = Depends(get_db)
):
    """Get all events for admin (including unpublished)"""
    events = db.query(Event).order_by(Event.start_at.desc()).all()

    result = []
    for event in events:
        rsvp_count = db.query(EventRSVP).filter(
            EventRSVP.event_id == event.id,
            EventRSVP.rsvp_status == RSVPStatus.yes
        ).count()

        result.append({
            "id": str(event.id),
            "title": event.title,
            "description": event.description,
            "start_at": event.start_at,
            "end_at": event.end_at,
            "location": event.location,
            "capacity": event.capacity,
            "published": event.published,
            "created_by": str(event.created_by),
            "created_at": event.created_at,
            "rsvp_count": rsvp_count
        })

    return result

@api_router.delete("/admin/events/{event_id}")
async def delete_event(
    event_id: str,
    current_user: User = Depends(require_permission("events.delete")),
    db: Session = Depends(get_db)
):
    """Delete an event (cascade deletes RSVPs)"""
    event = db.query(Event).filter(Event.id == event_id).first()
    if not event:
        raise HTTPException(status_code=404, detail="Event not found")

    db.delete(event)
    db.commit()

    return {"message": "Event deleted successfully"}

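# The RSVP cascade mentioned in the docstring is assumed to be configured on the
# Event/EventRSVP relationship (or the database foreign key) in models.py; db.delete(event)
# itself only removes the event row unless such a cascade exists.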
# ==================== PAYMENT & SUBSCRIPTION ENDPOINTS ====================

# Pydantic model for checkout request
class CheckoutRequest(BaseModel):
    plan_id: str
    amount_cents: int = Field(..., ge=3000, description="Total amount in cents (minimum $30)")

    @validator('amount_cents')
    def validate_amount(cls, v):
        if v < 3000:
            raise ValueError('Amount must be at least $30 (3000 cents)')
        return v

# Pydantic model for plan CRUD
class PlanCreateRequest(BaseModel):
    name: str = Field(min_length=1, max_length=100)
    description: Optional[str] = Field(None, max_length=500)
    price_cents: int = Field(ge=0, le=100000000)  # Legacy field, kept for backward compatibility
    billing_cycle: Literal["monthly", "quarterly", "yearly", "lifetime", "custom"]
    stripe_price_id: Optional[str] = None  # Deprecated, no longer required
    active: bool = True

    # Custom billing cycle fields (for recurring date ranges like Jan 1 - Dec 31)
    custom_cycle_enabled: bool = False
    custom_cycle_start_month: Optional[int] = Field(None, ge=1, le=12)
    custom_cycle_start_day: Optional[int] = Field(None, ge=1, le=31)
    custom_cycle_end_month: Optional[int] = Field(None, ge=1, le=12)
    custom_cycle_end_day: Optional[int] = Field(None, ge=1, le=31)

    # Dynamic pricing fields
    minimum_price_cents: int = Field(3000, ge=3000, le=100000000)  # $30 minimum
    suggested_price_cents: Optional[int] = Field(None, ge=3000, le=100000000)
    allow_donation: bool = True

    @validator('name')
    def validate_name(cls, v):
        if not v.strip():
            raise ValueError('Name cannot be empty or whitespace')
        return v.strip()

    @validator('custom_cycle_start_month', 'custom_cycle_end_month')
    def validate_months(cls, v):
        if v is not None and (v < 1 or v > 12):
            raise ValueError('Month must be between 1 and 12')
        return v

    @validator('custom_cycle_start_day', 'custom_cycle_end_day')
    def validate_days(cls, v):
        if v is not None and (v < 1 or v > 31):
            raise ValueError('Day must be between 1 and 31')
        return v

    @validator('suggested_price_cents')
    def validate_suggested_price(cls, v, values):
        if v is not None and 'minimum_price_cents' in values:
            if v < values['minimum_price_cents']:
                raise ValueError('Suggested price must be >= minimum price')
        return v

# Pydantic model for updating subscriptions
class UpdateSubscriptionRequest(BaseModel):
    status: Optional[str] = Field(None, pattern="^(active|expired|cancelled)$")
    end_date: Optional[datetime] = None

# Pydantic model for donation checkout
class DonationCheckoutRequest(BaseModel):
    amount_cents: int = Field(..., ge=100, description="Donation amount in cents (minimum $1.00)")

    @validator('amount_cents')
    def validate_amount(cls, v):
        if v < 100:
            raise ValueError('Donation must be at least $1.00 (100 cents)')
        return v

# Pydantic model for contact form
class ContactFormRequest(BaseModel):
    first_name: str = Field(..., min_length=1, max_length=100)
    last_name: str = Field(..., min_length=1, max_length=100)
    email: str = Field(..., min_length=1, max_length=255)
    subject: str = Field(..., min_length=1, max_length=200)
    message: str = Field(..., min_length=1, max_length=2000)

    @validator('email')
    def validate_email(cls, v):
        import re
        email_regex = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
        if not re.match(email_regex, v):
            raise ValueError('Invalid email address')
        return v

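# Example PlanCreateRequest payload for a calendar-year membership (field names as declared
# above, values illustrative): a "custom" cycle running Jan 1 - Dec 31 with pay-what-you-want
# pricing above the $30 floor:
# {"name": "Annual Membership", "price_cents": 3000, "billing_cycle": "custom",
#  "custom_cycle_enabled": true, "custom_cycle_start_month": 1, "custom_cycle_start_day": 1,
#  "custom_cycle_end_month": 12, "custom_cycle_end_day": 31,
#  "minimum_price_cents": 3000, "suggested_price_cents": 5000, "allow_donation": true}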
@api_router.get("/subscriptions/plans")
async def get_subscription_plans(db: Session = Depends(get_db)):
    """Get all active subscription plans."""
    plans = db.query(SubscriptionPlan).filter(SubscriptionPlan.active == True).all()
    return plans

# ==================== ADMIN PLAN CRUD ENDPOINTS ====================

@api_router.get("/admin/subscriptions/plans")
async def get_all_plans_admin(
    current_user: User = Depends(require_permission("subscriptions.view")),
    db: Session = Depends(get_db)
):
    """Get all subscription plans for admin (including inactive) with subscriber counts."""
    plans = db.query(SubscriptionPlan).order_by(SubscriptionPlan.created_at.desc()).all()

    result = []
    for plan in plans:
        subscriber_count = db.query(Subscription).filter(
            Subscription.plan_id == plan.id,
            Subscription.status == SubscriptionStatus.active
        ).count()

        result.append({
            "id": str(plan.id),
            "name": plan.name,
            "description": plan.description,
            "price_cents": plan.price_cents,
            "billing_cycle": plan.billing_cycle,
            "stripe_price_id": plan.stripe_price_id,
            "active": plan.active,
            "subscriber_count": subscriber_count,
            "created_at": plan.created_at,
            "updated_at": plan.updated_at
        })

    return result

@api_router.get("/admin/subscriptions/plans/{plan_id}")
async def get_plan_admin(
    plan_id: str,
    current_user: User = Depends(require_permission("subscriptions.view")),
    db: Session = Depends(get_db)
):
    """Get single plan details with subscriber count."""
    plan = db.query(SubscriptionPlan).filter(SubscriptionPlan.id == plan_id).first()

    if not plan:
        raise HTTPException(status_code=404, detail="Plan not found")

    subscriber_count = db.query(Subscription).filter(
        Subscription.plan_id == plan.id,
        Subscription.status == SubscriptionStatus.active
    ).count()

    return {
        "id": str(plan.id),
        "name": plan.name,
        "description": plan.description,
        "price_cents": plan.price_cents,
        "billing_cycle": plan.billing_cycle,
        "stripe_price_id": plan.stripe_price_id,
        "active": plan.active,
        "subscriber_count": subscriber_count,
        "created_at": plan.created_at,
        "updated_at": plan.updated_at
    }

@api_router.post("/admin/subscriptions/plans")
async def create_plan(
    request: PlanCreateRequest,
    current_user: User = Depends(require_permission("subscriptions.plans")),
    db: Session = Depends(get_db)
):
    """Create new subscription plan."""
    # Check for duplicate name
    existing = db.query(SubscriptionPlan).filter(
        SubscriptionPlan.name == request.name
    ).first()
    if existing:
        raise HTTPException(
            status_code=400,
            detail="A plan with this name already exists"
        )

    # Validate custom cycle dates if enabled
    if request.custom_cycle_enabled:
        if not all([
            request.custom_cycle_start_month,
            request.custom_cycle_start_day,
            request.custom_cycle_end_month,
            request.custom_cycle_end_day
        ]):
            raise HTTPException(
                status_code=400,
                detail="All custom cycle date fields must be provided when custom_cycle_enabled is true"
            )

    plan = SubscriptionPlan(
        name=request.name,
        description=request.description,
        price_cents=request.price_cents,  # Legacy field
        billing_cycle=request.billing_cycle,
        stripe_price_id=request.stripe_price_id,  # Deprecated
        active=request.active,
        # Custom billing cycle fields
        custom_cycle_enabled=request.custom_cycle_enabled,
        custom_cycle_start_month=request.custom_cycle_start_month,
        custom_cycle_start_day=request.custom_cycle_start_day,
        custom_cycle_end_month=request.custom_cycle_end_month,
        custom_cycle_end_day=request.custom_cycle_end_day,
        # Dynamic pricing fields
        minimum_price_cents=request.minimum_price_cents,
        suggested_price_cents=request.suggested_price_cents,
        allow_donation=request.allow_donation
    )

    db.add(plan)
    db.commit()
    db.refresh(plan)

    logger.info(f"Admin {current_user.email} created plan: {plan.name}")

    return {
        "id": str(plan.id),
        "name": plan.name,
        "description": plan.description,
        "price_cents": plan.price_cents,
        "billing_cycle": plan.billing_cycle,
        "stripe_price_id": plan.stripe_price_id,
        "active": plan.active,
        "custom_cycle_enabled": plan.custom_cycle_enabled,
        "custom_cycle_start_month": plan.custom_cycle_start_month,
        "custom_cycle_start_day": plan.custom_cycle_start_day,
        "custom_cycle_end_month": plan.custom_cycle_end_month,
        "custom_cycle_end_day": plan.custom_cycle_end_day,
        "minimum_price_cents": plan.minimum_price_cents,
        "suggested_price_cents": plan.suggested_price_cents,
        "allow_donation": plan.allow_donation,
        "subscriber_count": 0,
        "created_at": plan.created_at,
        "updated_at": plan.updated_at
    }

@api_router.put("/admin/subscriptions/plans/{plan_id}")
async def update_plan(
    plan_id: str,
    request: PlanCreateRequest,
    current_user: User = Depends(require_permission("subscriptions.plans")),
    db: Session = Depends(get_db)
):
    """Update subscription plan."""
    plan = db.query(SubscriptionPlan).filter(SubscriptionPlan.id == plan_id).first()

    if not plan:
        raise HTTPException(status_code=404, detail="Plan not found")

    # Check for duplicate name (excluding current plan)
    existing = db.query(SubscriptionPlan).filter(
        SubscriptionPlan.name == request.name,
        SubscriptionPlan.id != plan_id
    ).first()
    if existing:
        raise HTTPException(
            status_code=400,
            detail="A plan with this name already exists"
        )

    # Validate custom cycle dates if enabled
    if request.custom_cycle_enabled:
        if not all([
            request.custom_cycle_start_month,
            request.custom_cycle_start_day,
            request.custom_cycle_end_month,
            request.custom_cycle_end_day
        ]):
            raise HTTPException(
                status_code=400,
                detail="All custom cycle date fields must be provided when custom_cycle_enabled is true"
            )

    # Update fields
    plan.name = request.name
    plan.description = request.description
    plan.price_cents = request.price_cents  # Legacy field
    plan.billing_cycle = request.billing_cycle
    plan.stripe_price_id = request.stripe_price_id  # Deprecated
    plan.active = request.active
    # Custom billing cycle fields
    plan.custom_cycle_enabled = request.custom_cycle_enabled
    plan.custom_cycle_start_month = request.custom_cycle_start_month
    plan.custom_cycle_start_day = request.custom_cycle_start_day
    plan.custom_cycle_end_month = request.custom_cycle_end_month
    plan.custom_cycle_end_day = request.custom_cycle_end_day
    # Dynamic pricing fields
    plan.minimum_price_cents = request.minimum_price_cents
    plan.suggested_price_cents = request.suggested_price_cents
    plan.allow_donation = request.allow_donation
    plan.updated_at = datetime.now(timezone.utc)

    db.commit()
    db.refresh(plan)

    logger.info(f"Admin {current_user.email} updated plan: {plan.name}")

    subscriber_count = db.query(Subscription).filter(
        Subscription.plan_id == plan.id,
        Subscription.status == SubscriptionStatus.active
    ).count()

    return {
        "id": str(plan.id),
        "name": plan.name,
        "description": plan.description,
        "price_cents": plan.price_cents,
        "billing_cycle": plan.billing_cycle,
        "stripe_price_id": plan.stripe_price_id,
        "active": plan.active,
        "subscriber_count": subscriber_count,
        "created_at": plan.created_at,
        "updated_at": plan.updated_at
    }

@api_router.delete("/admin/subscriptions/plans/{plan_id}")
async def delete_plan(
    plan_id: str,
    current_user: User = Depends(require_permission("subscriptions.plans")),
    db: Session = Depends(get_db)
):
    """Soft delete plan (set active = False)."""
    plan = db.query(SubscriptionPlan).filter(SubscriptionPlan.id == plan_id).first()

    if not plan:
        raise HTTPException(status_code=404, detail="Plan not found")

    # Check if plan has active subscriptions
    active_subs = db.query(Subscription).filter(
        Subscription.plan_id == plan_id,
        Subscription.status == SubscriptionStatus.active
    ).count()

    if active_subs > 0:
        raise HTTPException(
            status_code=400,
            detail=f"Cannot delete plan with {active_subs} active subscriptions"
        )

    plan.active = False
    plan.updated_at = datetime.now(timezone.utc)
    db.commit()

    logger.info(f"Admin {current_user.email} deactivated plan: {plan.name}")

    return {"message": "Plan deactivated successfully"}

# ============================================================================
# Admin Subscription Management Routes
# ============================================================================

@api_router.get("/admin/subscriptions")
async def get_all_subscriptions(
    status: Optional[str] = None,
    plan_id: Optional[str] = None,
    user_id: Optional[str] = None,
    current_user: User = Depends(require_permission("subscriptions.view")),
    db: Session = Depends(get_db)
):
    """Get all subscriptions with optional filters."""
    # Use explicit join to avoid ambiguous foreign key error
    query = db.query(Subscription).join(Subscription.user).join(Subscription.plan)

    if status:
        query = query.filter(Subscription.status == status)
    if plan_id:
        query = query.filter(Subscription.plan_id == plan_id)
    if user_id:
        query = query.filter(Subscription.user_id == user_id)

    subscriptions = query.order_by(Subscription.created_at.desc()).all()

    return [{
        "id": str(sub.id),
        "user": {
            "id": str(sub.user.id),
            "first_name": sub.user.first_name,
            "last_name": sub.user.last_name,
            "email": sub.user.email
        },
        "plan": {
            "id": str(sub.plan.id),
            "name": sub.plan.name,
            "billing_cycle": sub.plan.billing_cycle
        },
        "status": sub.status.value,
        "start_date": sub.start_date,
        "end_date": sub.end_date,
        "amount_paid_cents": sub.amount_paid_cents,
        "base_subscription_cents": sub.base_subscription_cents,
        "donation_cents": sub.donation_cents,
        "payment_method": sub.payment_method,
        "stripe_subscription_id": sub.stripe_subscription_id,
        "stripe_customer_id": sub.stripe_customer_id,
        "created_at": sub.created_at,
        "updated_at": sub.updated_at,
        # Stripe transaction metadata
        "stripe_payment_intent_id": sub.stripe_payment_intent_id,
        "stripe_charge_id": sub.stripe_charge_id,
        "stripe_invoice_id": sub.stripe_invoice_id,
        "payment_completed_at": sub.payment_completed_at.isoformat() if sub.payment_completed_at else None,
        "card_last4": sub.card_last4,
        "card_brand": sub.card_brand,
        "stripe_receipt_url": sub.stripe_receipt_url
    } for sub in subscriptions]

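# Note on the explicit joins above: joining through the relationship attributes
# (Subscription.user / Subscription.plan) pins the join condition for SQLAlchemy; per the
# comment in the handler, this is what avoids the ambiguous-foreign-key error that a bare
# join(User) can raise when more than one foreign key path exists between the tables.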
@api_router.get("/admin/subscriptions/stats")
async def get_subscription_stats(
    current_user: User = Depends(require_permission("subscriptions.view")),
    db: Session = Depends(get_db)
):
    """Get subscription statistics for admin dashboard."""
    from sqlalchemy import func

    total = db.query(Subscription).count()
    active = db.query(Subscription).filter(
        Subscription.status == SubscriptionStatus.active
    ).count()
    cancelled = db.query(Subscription).filter(
        Subscription.status == SubscriptionStatus.cancelled
    ).count()
    expired = db.query(Subscription).filter(
        Subscription.status == SubscriptionStatus.expired
    ).count()

    revenue_data = db.query(
        func.sum(Subscription.amount_paid_cents).label('total_revenue'),
        func.sum(Subscription.base_subscription_cents).label('total_base'),
        func.sum(Subscription.donation_cents).label('total_donations')
    ).first()

    return {
        "total": total,
        "active": active,
        "cancelled": cancelled,
        "expired": expired,
        "total_revenue": revenue_data.total_revenue or 0,
        "total_base": revenue_data.total_base or 0,
        "total_donations": revenue_data.total_donations or 0
    }

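# Note: the revenue aggregates above have no status filter, so they sum amounts across
# subscriptions in every status (active, cancelled and expired); add a filter on
# Subscription.status if only active-subscription revenue is wanted.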
@api_router.put("/admin/subscriptions/{subscription_id}")
async def update_subscription(
    subscription_id: str,
    request: UpdateSubscriptionRequest,
    current_user: User = Depends(require_permission("subscriptions.edit")),
    db: Session = Depends(get_db)
):
    """Update subscription details (status, dates)."""
    subscription = db.query(Subscription).filter(
        Subscription.id == subscription_id
    ).first()

    if not subscription:
        raise HTTPException(status_code=404, detail="Subscription not found")

    # Update fields if provided
    if request.status:
        subscription.status = SubscriptionStatus[request.status]
    if request.end_date:
        subscription.end_date = request.end_date

    subscription.updated_at = datetime.now(timezone.utc)
    db.commit()
    db.refresh(subscription)

    logger.info(f"Admin {current_user.email} updated subscription {subscription_id}")

    return {
        "id": str(subscription.id),
        "user_id": str(subscription.user_id),
        "plan_id": str(subscription.plan_id),
        "status": subscription.status.value,
        "start_date": subscription.start_date,
        "end_date": subscription.end_date,
        "amount_paid_cents": subscription.amount_paid_cents,
        "updated_at": subscription.updated_at
    }

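# Example UpdateSubscriptionRequest payload (values illustrative; status must match the
# active|expired|cancelled pattern declared on the model above):
# {"status": "expired", "end_date": "2025-12-31T23:59:59Z"}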
@api_router.post("/admin/subscriptions/{subscription_id}/cancel")
async def cancel_subscription(
    subscription_id: str,
    current_user: User = Depends(require_permission("subscriptions.cancel")),
    db: Session = Depends(get_db)
):
    """Cancel a subscription."""
    subscription = db.query(Subscription).filter(
        Subscription.id == subscription_id
    ).first()

    if not subscription:
        raise HTTPException(status_code=404, detail="Subscription not found")

    subscription.status = SubscriptionStatus.cancelled
    subscription.updated_at = datetime.now(timezone.utc)

    # Also update user status if currently active
    user = subscription.user
    if user.status == UserStatus.active:
        user.status = UserStatus.inactive
        user.updated_at = datetime.now(timezone.utc)

    db.commit()

    logger.info(f"Admin {current_user.email} cancelled subscription {subscription_id} for user {user.email}")

    return {"message": "Subscription cancelled successfully"}

@api_router.get("/admin/subscriptions/export")
async def export_subscriptions(
    status: Optional[str] = None,
    plan_id: Optional[str] = None,
    search: Optional[str] = None,
    current_user: User = Depends(require_permission("subscriptions.export")),
    db: Session = Depends(get_db)
):
    """Export subscriptions to CSV for financial records"""

    # Build query with same logic as get_all_subscriptions
    query = db.query(Subscription).join(Subscription.user).join(Subscription.plan)

    # Apply filters
    if status:
        query = query.filter(Subscription.status == status)
    if plan_id:
        query = query.filter(Subscription.plan_id == plan_id)
    if search:
        search_term = f"%{search}%"
        query = query.filter(
            (User.first_name.ilike(search_term)) |
            (User.last_name.ilike(search_term)) |
            (User.email.ilike(search_term))
        )

    subscriptions = query.order_by(Subscription.created_at.desc()).all()

    # Create CSV
    output = io.StringIO()
    writer = csv.writer(output)

    # Header row
    writer.writerow([
        'Subscription ID', 'Member Name', 'Email', 'Plan Name', 'Billing Cycle',
        'Status', 'Base Amount', 'Donation Amount', 'Total Amount', 'Payment Method',
        'Start Date', 'End Date', 'Stripe Subscription ID', 'Created At', 'Updated At'
    ])

    # Data rows
    for sub in subscriptions:
        user = sub.user
        plan = sub.plan
        writer.writerow([
            str(sub.id),
            f"{user.first_name} {user.last_name}",
            user.email,
            plan.name,
            plan.billing_cycle,
            sub.status.value,
            # Guard against NULL amounts, matching the donation/total columns below
            f"${sub.base_subscription_cents / 100:.2f}" if sub.base_subscription_cents else "$0.00",
            f"${sub.donation_cents / 100:.2f}" if sub.donation_cents else "$0.00",
            f"${sub.amount_paid_cents / 100:.2f}" if sub.amount_paid_cents else "$0.00",
            sub.payment_method or 'Stripe',
            sub.start_date.isoformat() if sub.start_date else '',
            sub.end_date.isoformat() if sub.end_date else '',
            sub.stripe_subscription_id or '',
            sub.created_at.isoformat() if sub.created_at else '',
            sub.updated_at.isoformat() if sub.updated_at else ''
        ])

    # Return CSV
    filename = f"subscriptions_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv"
    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": f"attachment; filename={filename}"}
    )

# ============================================================================
|
|
# Admin Donation Management Routes
|
|
# ============================================================================
|
|
|
|
@api_router.get("/admin/donations")
|
|
async def get_donations(
|
|
donation_type: Optional[str] = None,
|
|
status: Optional[str] = None,
|
|
start_date: Optional[str] = None,
|
|
end_date: Optional[str] = None,
|
|
search: Optional[str] = None,
|
|
current_user: User = Depends(require_permission("donations.view")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Get all donations with optional filters."""
|
|
|
|
query = db.query(Donation).outerjoin(User, Donation.user_id == User.id)
|
|
|
|
# Apply filters
|
|
if donation_type:
|
|
try:
|
|
query = query.filter(Donation.donation_type == DonationType[donation_type])
|
|
except KeyError:
|
|
raise HTTPException(status_code=400, detail=f"Invalid donation type: {donation_type}")
|
|
|
|
if status:
|
|
try:
|
|
query = query.filter(Donation.status == DonationStatus[status])
|
|
except KeyError:
|
|
raise HTTPException(status_code=400, detail=f"Invalid status: {status}")
|
|
|
|
if start_date:
|
|
try:
|
|
start_dt = datetime.fromisoformat(start_date.replace('Z', '+00:00'))
|
|
query = query.filter(Donation.created_at >= start_dt)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid start_date format")
|
|
|
|
if end_date:
|
|
try:
|
|
end_dt = datetime.fromisoformat(end_date.replace('Z', '+00:00'))
|
|
query = query.filter(Donation.created_at <= end_dt)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid end_date format")
|
|
|
|
if search:
|
|
search_term = f"%{search}%"
|
|
query = query.filter(
|
|
(Donation.donor_email.ilike(search_term)) |
|
|
(Donation.donor_name.ilike(search_term)) |
|
|
(User.first_name.ilike(search_term)) |
|
|
(User.last_name.ilike(search_term))
|
|
)
|
|
|
|
donations = query.order_by(Donation.created_at.desc()).all()
|
|
|
|
return [{
|
|
"id": str(d.id),
|
|
"amount_cents": d.amount_cents,
|
|
"amount": f"${d.amount_cents / 100:.2f}",
|
|
"donation_type": d.donation_type.value,
|
|
"status": d.status.value,
|
|
"donor_name": d.donor_name if d.donation_type == DonationType.public else (f"{d.user.first_name} {d.user.last_name}" if d.user else d.donor_name),
|
|
"donor_email": d.donor_email or (d.user.email if d.user else None),
|
|
"payment_method": d.payment_method,
|
|
"notes": d.notes,
|
|
"created_at": d.created_at.isoformat(),
|
|
# Stripe transaction metadata
|
|
"stripe_payment_intent_id": d.stripe_payment_intent_id,
|
|
"stripe_charge_id": d.stripe_charge_id,
|
|
"stripe_customer_id": d.stripe_customer_id,
|
|
"payment_completed_at": d.payment_completed_at.isoformat() if d.payment_completed_at else None,
|
|
"card_last4": d.card_last4,
|
|
"card_brand": d.card_brand,
|
|
"stripe_receipt_url": d.stripe_receipt_url
|
|
} for d in donations]
|
|
|
|
@api_router.get("/admin/donations/stats")
|
|
async def get_donation_stats(
|
|
current_user: User = Depends(require_permission("donations.view")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Get donation statistics."""
|
|
from sqlalchemy import func
|
|
|
|
# Total donations
|
|
total_donations = db.query(Donation).filter(
|
|
Donation.status == DonationStatus.completed
|
|
).count()
|
|
|
|
# Member donations
|
|
member_donations = db.query(Donation).filter(
|
|
Donation.status == DonationStatus.completed,
|
|
Donation.donation_type == DonationType.member
|
|
).count()
|
|
|
|
# Public donations
|
|
public_donations = db.query(Donation).filter(
|
|
Donation.status == DonationStatus.completed,
|
|
Donation.donation_type == DonationType.public
|
|
).count()
|
|
|
|
# Total amount
|
|
total_amount = db.query(func.sum(Donation.amount_cents)).filter(
|
|
Donation.status == DonationStatus.completed
|
|
).scalar() or 0
|
|
|
|
# This month
|
|
now = datetime.now(timezone.utc)
|
|
this_month_start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
|
|
this_month_amount = db.query(func.sum(Donation.amount_cents)).filter(
|
|
Donation.status == DonationStatus.completed,
|
|
Donation.created_at >= this_month_start
|
|
).scalar() or 0
|
|
|
|
this_month_count = db.query(Donation).filter(
|
|
Donation.status == DonationStatus.completed,
|
|
Donation.created_at >= this_month_start
|
|
).count()
|
|
|
|
return {
|
|
"total_donations": total_donations,
|
|
"member_donations": member_donations,
|
|
"public_donations": public_donations,
|
|
"total_amount_cents": total_amount,
|
|
"total_amount": f"${total_amount / 100:.2f}",
|
|
"this_month_amount_cents": this_month_amount,
|
|
"this_month_amount": f"${this_month_amount / 100:.2f}",
|
|
"this_month_count": this_month_count
|
|
}
|
|
|
|
@api_router.get("/admin/donations/export")
|
|
async def export_donations(
|
|
donation_type: Optional[str] = None,
|
|
status: Optional[str] = None,
|
|
start_date: Optional[str] = None,
|
|
end_date: Optional[str] = None,
|
|
search: Optional[str] = None,
|
|
current_user: User = Depends(require_permission("donations.export")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Export donations to CSV."""
|
|
import io
|
|
import csv
|
|
from fastapi.responses import StreamingResponse
|
|
|
|
# Build query (same as get_donations)
|
|
query = db.query(Donation).outerjoin(User, Donation.user_id == User.id)
|
|
|
|
if donation_type:
|
|
try:
|
|
query = query.filter(Donation.donation_type == DonationType[donation_type])
|
|
except KeyError:
|
|
raise HTTPException(status_code=400, detail=f"Invalid donation type: {donation_type}")
|
|
|
|
if status:
|
|
try:
|
|
query = query.filter(Donation.status == DonationStatus[status])
|
|
except KeyError:
|
|
raise HTTPException(status_code=400, detail=f"Invalid status: {status}")
|
|
|
|
if start_date:
|
|
try:
|
|
start_dt = datetime.fromisoformat(start_date.replace('Z', '+00:00'))
|
|
query = query.filter(Donation.created_at >= start_dt)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid start_date format")
|
|
|
|
if end_date:
|
|
try:
|
|
end_dt = datetime.fromisoformat(end_date.replace('Z', '+00:00'))
|
|
query = query.filter(Donation.created_at <= end_dt)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid end_date format")
|
|
|
|
if search:
|
|
search_term = f"%{search}%"
|
|
query = query.filter(
|
|
(Donation.donor_email.ilike(search_term)) |
|
|
(Donation.donor_name.ilike(search_term)) |
|
|
(User.first_name.ilike(search_term)) |
|
|
(User.last_name.ilike(search_term))
|
|
)
|
|
|
|
donations = query.order_by(Donation.created_at.desc()).all()
|
|
|
|
# Create CSV
|
|
output = io.StringIO()
|
|
writer = csv.writer(output)
|
|
|
|
writer.writerow([
|
|
'Donation ID', 'Date', 'Donor Name', 'Donor Email', 'Type',
|
|
'Amount', 'Status', 'Payment Method', 'Stripe Payment Intent',
|
|
'Notes'
|
|
])
|
|
|
|
for d in donations:
|
|
donor_name = d.donor_name if d.donation_type == DonationType.public else (f"{d.user.first_name} {d.user.last_name}" if d.user else d.donor_name)
|
|
donor_email = d.donor_email or (d.user.email if d.user else '')
|
|
|
|
writer.writerow([
|
|
str(d.id),
|
|
d.created_at.strftime('%Y-%m-%d %H:%M:%S'),
|
|
donor_name or '',
|
|
donor_email,
|
|
d.donation_type.value,
|
|
f"${d.amount_cents / 100:.2f}",
|
|
d.status.value,
|
|
d.payment_method or '',
|
|
d.stripe_payment_intent_id or '',
|
|
d.notes or ''
|
|
])
|
|
|
|
filename = f"donations_export_{datetime.now().strftime('%Y%m%d')}.csv"
|
|
return StreamingResponse(
|
|
iter([output.getvalue()]),
|
|
media_type="text/csv",
|
|
headers={"Content-Disposition": f"attachment; filename={filename}"}
|
|
)
|
|
|
|
# ============================================================================
|
|
# Admin Document Management Routes
|
|
# ============================================================================
|
|
|
|
# Newsletter Archive Admin Routes
|
|
@api_router.post("/admin/newsletters")
|
|
async def create_newsletter(
|
|
title: str = Form(...),
|
|
description: str = Form(None),
|
|
published_date: str = Form(...),
|
|
document_type: str = Form("google_docs"),
|
|
document_url: str = Form(None),
|
|
file: Optional[UploadFile] = File(None),
|
|
current_user: User = Depends(require_permission("newsletters.create")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Create newsletter record
|
|
Admin only - supports both URL links and file uploads
|
|
"""
|
|
from models import NewsletterArchive, StorageUsage
|
|
from r2_storage import get_r2_storage
|
|
|
|
final_url = document_url
|
|
file_size = None
|
|
|
|
# If file uploaded, upload to R2
|
|
if file and document_type == 'upload':
|
|
r2 = get_r2_storage()
|
|
public_url, object_key, file_size_bytes = await r2.upload_file(
|
|
file=file,
|
|
folder="newsletters",
|
|
allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
|
|
max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
|
|
)
|
|
final_url = public_url
|
|
file_size = file_size_bytes
|
|
|
|
# Update storage usage
|
|
storage = db.query(StorageUsage).first()
|
|
if storage:
|
|
storage.total_bytes_used += file_size
|
|
storage.last_updated = datetime.now(timezone.utc)
|
|
db.commit()
|
|
|
|
newsletter = NewsletterArchive(
|
|
title=title,
|
|
description=description,
|
|
published_date=datetime.fromisoformat(published_date.replace('Z', '+00:00')),
|
|
document_url=final_url,
|
|
document_type=document_type,
|
|
file_size_bytes=file_size,
|
|
created_by=current_user.id
|
|
)
|
|
|
|
db.add(newsletter)
|
|
db.commit()
|
|
db.refresh(newsletter)
|
|
|
|
return {
|
|
"id": str(newsletter.id),
|
|
"message": "Newsletter created successfully"
|
|
}
|
|
|
|
@api_router.put("/admin/newsletters/{newsletter_id}")
|
|
async def update_newsletter(
|
|
newsletter_id: str,
|
|
title: str = Form(...),
|
|
description: str = Form(None),
|
|
published_date: str = Form(...),
|
|
document_type: str = Form("google_docs"),
|
|
document_url: str = Form(None),
|
|
file: Optional[UploadFile] = File(None),
|
|
current_user: User = Depends(require_permission("newsletters.edit")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Update newsletter record
|
|
Admin only - supports both URL links and file uploads
|
|
"""
|
|
from models import NewsletterArchive, StorageUsage
|
|
from r2_storage import get_r2_storage
|
|
|
|
newsletter = db.query(NewsletterArchive).filter(
|
|
NewsletterArchive.id == newsletter_id
|
|
).first()
|
|
|
|
if not newsletter:
|
|
raise HTTPException(status_code=404, detail="Newsletter not found")
|
|
|
|
final_url = document_url
|
|
file_size = newsletter.file_size_bytes
|
|
|
|
# If file uploaded, upload to R2
|
|
if file and document_type == 'upload':
|
|
r2 = get_r2_storage()
|
|
public_url, object_key, file_size_bytes = await r2.upload_file(
|
|
file=file,
|
|
folder="newsletters",
|
|
allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
|
|
max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
|
|
)
|
|
final_url = public_url
|
|
|
|
# Update storage usage (subtract old, add new)
|
|
storage = db.query(StorageUsage).first()
|
|
if storage and newsletter.file_size_bytes:
|
|
storage.total_bytes_used -= newsletter.file_size_bytes
|
|
if storage:
|
|
storage.total_bytes_used += file_size_bytes
|
|
storage.last_updated = datetime.now(timezone.utc)
|
|
db.commit()
|
|
|
|
file_size = file_size_bytes
|
|
|
|
newsletter.title = title
|
|
newsletter.description = description
|
|
newsletter.published_date = datetime.fromisoformat(published_date.replace('Z', '+00:00'))
|
|
newsletter.document_url = final_url
|
|
newsletter.document_type = document_type
|
|
newsletter.file_size_bytes = file_size
|
|
newsletter.updated_at = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
|
|
return {"message": "Newsletter updated successfully"}
|
|
|
|
@api_router.delete("/admin/newsletters/{newsletter_id}")
|
|
async def delete_newsletter(
|
|
newsletter_id: str,
|
|
current_user: User = Depends(require_permission("newsletters.delete")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Delete newsletter record
|
|
Admin only
|
|
"""
|
|
from models import NewsletterArchive
|
|
|
|
newsletter = db.query(NewsletterArchive).filter(
|
|
NewsletterArchive.id == newsletter_id
|
|
).first()
|
|
|
|
if not newsletter:
|
|
raise HTTPException(status_code=404, detail="Newsletter not found")
|
|
|
|
db.delete(newsletter)
|
|
db.commit()
|
|
|
|
return {"message": "Newsletter deleted successfully"}
|
|
|
|
# Financial Reports Admin Routes
|
|
@api_router.post("/admin/financials")
|
|
async def create_financial_report(
|
|
year: int = Form(...),
|
|
title: str = Form(...),
|
|
document_type: str = Form("google_drive"),
|
|
document_url: str = Form(None),
|
|
file: Optional[UploadFile] = File(None),
|
|
current_user: User = Depends(require_permission("financials.create")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Create financial report record
|
|
Admin only - supports both URL links and file uploads
|
|
"""
|
|
from models import FinancialReport, StorageUsage
|
|
from r2_storage import get_r2_storage
|
|
|
|
final_url = document_url
|
|
file_size = None
|
|
|
|
# If file uploaded, upload to R2
|
|
if file and document_type == 'upload':
|
|
r2 = get_r2_storage()
|
|
public_url, object_key, file_size_bytes = await r2.upload_file(
|
|
file=file,
|
|
folder="financials",
|
|
allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
|
|
max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
|
|
)
|
|
final_url = public_url
|
|
file_size = file_size_bytes
|
|
|
|
# Update storage usage
|
|
storage = db.query(StorageUsage).first()
|
|
if storage:
|
|
storage.total_bytes_used += file_size
|
|
storage.last_updated = datetime.now(timezone.utc)
|
|
db.commit()
|
|
|
|
report = FinancialReport(
|
|
year=year,
|
|
title=title,
|
|
document_url=final_url,
|
|
document_type=document_type,
|
|
file_size_bytes=file_size,
|
|
created_by=current_user.id
|
|
)
|
|
|
|
db.add(report)
|
|
db.commit()
|
|
db.refresh(report)
|
|
|
|
return {
|
|
"id": str(report.id),
|
|
"message": "Financial report created successfully"
|
|
}
|
|
|
|
@api_router.put("/admin/financials/{report_id}")
|
|
async def update_financial_report(
|
|
report_id: str,
|
|
year: int = Form(...),
|
|
title: str = Form(...),
|
|
document_type: str = Form("google_drive"),
|
|
document_url: str = Form(None),
|
|
file: Optional[UploadFile] = File(None),
|
|
current_user: User = Depends(require_permission("financials.edit")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Update financial report record
|
|
Admin only - supports both URL links and file uploads
|
|
"""
|
|
from models import FinancialReport, StorageUsage
|
|
from r2_storage import get_r2_storage
|
|
|
|
report = db.query(FinancialReport).filter(
|
|
FinancialReport.id == report_id
|
|
).first()
|
|
|
|
if not report:
|
|
raise HTTPException(status_code=404, detail="Financial report not found")
|
|
|
|
final_url = document_url
|
|
file_size = report.file_size_bytes
|
|
|
|
# If file uploaded, upload to R2
|
|
if file and document_type == 'upload':
|
|
r2 = get_r2_storage()
|
|
public_url, object_key, file_size_bytes = await r2.upload_file(
|
|
file=file,
|
|
folder="financials",
|
|
allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
|
|
max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
|
|
)
|
|
final_url = public_url
|
|
|
|
# Update storage usage (subtract old, add new)
|
|
storage = db.query(StorageUsage).first()
|
|
if storage and report.file_size_bytes:
|
|
storage.total_bytes_used -= report.file_size_bytes
|
|
if storage:
|
|
storage.total_bytes_used += file_size_bytes
|
|
storage.last_updated = datetime.now(timezone.utc)
|
|
db.commit()
|
|
|
|
file_size = file_size_bytes
|
|
|
|
report.year = year
|
|
report.title = title
|
|
report.document_url = final_url
|
|
report.document_type = document_type
|
|
report.file_size_bytes = file_size
|
|
report.updated_at = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
|
|
return {"message": "Financial report updated successfully"}
|
|
|
|
@api_router.delete("/admin/financials/{report_id}")
|
|
async def delete_financial_report(
|
|
report_id: str,
|
|
current_user: User = Depends(require_permission("financials.delete")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Delete financial report record
|
|
Admin only
|
|
"""
|
|
from models import FinancialReport
|
|
|
|
report = db.query(FinancialReport).filter(
|
|
FinancialReport.id == report_id
|
|
).first()
|
|
|
|
if not report:
|
|
raise HTTPException(status_code=404, detail="Financial report not found")
|
|
|
|
db.delete(report)
|
|
db.commit()
|
|
|
|
return {"message": "Financial report deleted successfully"}
|
|
|
|
# Bylaws Admin Routes
|
|
@api_router.post("/admin/bylaws")
|
|
async def create_bylaws(
|
|
title: str = Form(...),
|
|
version: str = Form(...),
|
|
effective_date: str = Form(...),
|
|
document_type: str = Form("google_drive"),
|
|
document_url: str = Form(None),
|
|
is_current: bool = Form(True),
|
|
file: Optional[UploadFile] = File(None),
|
|
current_user: User = Depends(require_permission("bylaws.create")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Create bylaws document
|
|
If is_current=True, sets all others to is_current=False
|
|
Admin only - supports both URL links and file uploads
|
|
"""
|
|
from models import BylawsDocument, StorageUsage
|
|
from r2_storage import get_r2_storage
|
|
|
|
final_url = document_url
|
|
file_size = None
|
|
|
|
# If file uploaded, upload to R2
|
|
if file and document_type == 'upload':
|
|
r2 = get_r2_storage()
|
|
public_url, object_key, file_size_bytes = await r2.upload_file(
|
|
file=file,
|
|
folder="bylaws",
|
|
allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
|
|
max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
|
|
)
|
|
final_url = public_url
|
|
file_size = file_size_bytes
|
|
|
|
# Update storage usage
|
|
storage = db.query(StorageUsage).first()
|
|
if storage:
|
|
storage.total_bytes_used += file_size
|
|
storage.last_updated = datetime.now(timezone.utc)
|
|
db.commit()
|
|
|
|
if is_current:
|
|
# Set all other bylaws to not current
|
|
db.query(BylawsDocument).update({"is_current": False})
|
|
|
|
bylaws = BylawsDocument(
|
|
title=title,
|
|
version=version,
|
|
effective_date=datetime.fromisoformat(effective_date.replace('Z', '+00:00')),
|
|
document_url=final_url,
|
|
document_type=document_type,
|
|
is_current=is_current,
|
|
file_size_bytes=file_size,
|
|
created_by=current_user.id
|
|
)
|
|
|
|
db.add(bylaws)
|
|
db.commit()
|
|
db.refresh(bylaws)
|
|
|
|
return {
|
|
"id": str(bylaws.id),
|
|
"message": "Bylaws created successfully"
|
|
}
|
|
|
|
@api_router.put("/admin/bylaws/{bylaws_id}")
|
|
async def update_bylaws(
|
|
bylaws_id: str,
|
|
title: str = Form(...),
|
|
version: str = Form(...),
|
|
effective_date: str = Form(...),
|
|
document_type: str = Form("google_drive"),
|
|
document_url: str = Form(None),
|
|
is_current: bool = Form(False),
|
|
file: Optional[UploadFile] = File(None),
|
|
current_user: User = Depends(require_permission("bylaws.edit")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Update bylaws document
|
|
If is_current=True, sets all others to is_current=False
|
|
Admin only - supports both URL links and file uploads
|
|
"""
|
|
from models import BylawsDocument, StorageUsage
|
|
from r2_storage import get_r2_storage
|
|
|
|
bylaws = db.query(BylawsDocument).filter(
|
|
BylawsDocument.id == bylaws_id
|
|
).first()
|
|
|
|
if not bylaws:
|
|
raise HTTPException(status_code=404, detail="Bylaws not found")
|
|
|
|
final_url = document_url
|
|
file_size = bylaws.file_size_bytes
|
|
|
|
# If file uploaded, upload to R2
|
|
if file and document_type == 'upload':
|
|
r2 = get_r2_storage()
|
|
public_url, object_key, file_size_bytes = await r2.upload_file(
|
|
file=file,
|
|
folder="bylaws",
|
|
allowed_types=r2.ALLOWED_DOCUMENT_TYPES,
|
|
max_size_bytes=int(os.getenv('MAX_FILE_SIZE_BYTES', 5242880))
|
|
)
|
|
final_url = public_url
|
|
|
|
# Update storage usage (subtract old, add new)
|
|
storage = db.query(StorageUsage).first()
|
|
if storage and bylaws.file_size_bytes:
|
|
storage.total_bytes_used -= bylaws.file_size_bytes
|
|
if storage:
|
|
storage.total_bytes_used += file_size_bytes
|
|
storage.last_updated = datetime.now(timezone.utc)
|
|
db.commit()
|
|
|
|
file_size = file_size_bytes
|
|
|
|
if is_current:
|
|
# Set all other bylaws to not current
|
|
db.query(BylawsDocument).filter(
|
|
BylawsDocument.id != bylaws_id
|
|
).update({"is_current": False})
|
|
|
|
bylaws.title = title
|
|
bylaws.version = version
|
|
bylaws.effective_date = datetime.fromisoformat(effective_date.replace('Z', '+00:00'))
|
|
bylaws.document_url = final_url
|
|
bylaws.document_type = document_type
|
|
bylaws.is_current = is_current
|
|
bylaws.file_size_bytes = file_size
|
|
|
|
db.commit()
|
|
|
|
return {"message": "Bylaws updated successfully"}
|
|
|
|
@api_router.delete("/admin/bylaws/{bylaws_id}")
|
|
async def delete_bylaws(
|
|
bylaws_id: str,
|
|
current_user: User = Depends(require_permission("bylaws.delete")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Delete bylaws document
|
|
Admin only
|
|
"""
|
|
from models import BylawsDocument
|
|
|
|
bylaws = db.query(BylawsDocument).filter(
|
|
BylawsDocument.id == bylaws_id
|
|
).first()
|
|
|
|
if not bylaws:
|
|
raise HTTPException(status_code=404, detail="Bylaws not found")
|
|
|
|
db.delete(bylaws)
|
|
db.commit()
|
|
|
|
return {"message": "Bylaws deleted successfully"}
|
|
|
|
# ============================================================
|
|
# Role Management Endpoints (Superadmin Only)
|
|
# ============================================================
|
|
|
|
@api_router.get("/admin/roles", response_model=List[RoleResponse])
|
|
async def get_all_roles(
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get all roles in the system with permission counts
|
|
Superadmin only
|
|
"""
|
|
from sqlalchemy import func
|
|
|
|
# Query roles with permission counts
|
|
roles_query = db.query(
|
|
Role,
|
|
func.count(RolePermission.id).label('permission_count')
|
|
).outerjoin(RolePermission, Role.id == RolePermission.role_id)\
|
|
.group_by(Role.id)\
|
|
.order_by(Role.is_system_role.desc(), Role.name)
|
|
|
|
roles_with_counts = roles_query.all()
|
|
|
|
return [
|
|
{
|
|
"id": str(role.id),
|
|
"code": role.code,
|
|
"name": role.name,
|
|
"description": role.description,
|
|
"is_system_role": role.is_system_role,
|
|
"created_at": role.created_at,
|
|
"updated_at": role.updated_at,
|
|
"permission_count": count
|
|
}
|
|
for role, count in roles_with_counts
|
|
]
|
|
|
|
@api_router.get("/admin/roles/assignable", response_model=List[RoleResponse])
|
|
async def get_assignable_roles(
|
|
current_user: User = Depends(require_permission("users.create")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get roles that the current user can assign when inviting staff
|
|
|
|
- Superadmin: Can assign all roles
|
|
- Admin: Can assign admin, finance, and non-elevated custom roles
|
|
- Returns roles filtered by user's permission level
|
|
"""
|
|
from sqlalchemy import func
|
|
|
|
# Query all roles with permission counts
|
|
roles_query = db.query(
|
|
Role,
|
|
func.count(RolePermission.id).label('permission_count')
|
|
).outerjoin(RolePermission, Role.id == RolePermission.role_id)\
|
|
.group_by(Role.id)\
|
|
.order_by(Role.is_system_role.desc(), Role.name)
|
|
|
|
all_roles = roles_query.all()
|
|
|
|
# Superadmin can assign any role
|
|
if current_user.role == UserRole.superadmin:
|
|
return [
|
|
{
|
|
"id": str(role.id),
|
|
"code": role.code,
|
|
"name": role.name,
|
|
"description": role.description,
|
|
"is_system_role": role.is_system_role,
|
|
"created_at": role.created_at,
|
|
"updated_at": role.updated_at,
|
|
"permission_count": count
|
|
}
|
|
for role, count in all_roles
|
|
]
|
|
|
|
# Admin users can assign: admin, finance, and non-elevated custom roles
|
|
# Get admin role's permissions to check for elevation
|
|
admin_role = db.query(Role).filter(Role.code == "admin").first()
|
|
admin_permission_codes = set()
|
|
if admin_role:
|
|
admin_permissions = db.query(RolePermission).filter(
|
|
RolePermission.role_id == admin_role.id
|
|
).all()
|
|
admin_permission_codes = {rp.permission_id for rp in admin_permissions}
|
|
|
|
assignable_roles = []
|
|
for role, count in all_roles:
|
|
# Always exclude superadmin role
|
|
if role.code == "superadmin":
|
|
continue
|
|
|
|
# Include system roles: admin and finance
|
|
if role.is_system_role and role.code in ["admin", "finance"]:
|
|
assignable_roles.append({
|
|
"id": str(role.id),
|
|
"code": role.code,
|
|
"name": role.name,
|
|
"description": role.description,
|
|
"is_system_role": role.is_system_role,
|
|
"created_at": role.created_at,
|
|
"updated_at": role.updated_at,
|
|
"permission_count": count
|
|
})
|
|
continue
|
|
|
|
# For custom roles, check if they're elevated
|
|
if not role.is_system_role:
|
|
role_permissions = db.query(RolePermission).filter(
|
|
RolePermission.role_id == role.id
|
|
).all()
|
|
role_permission_ids = {rp.permission_id for rp in role_permissions}
|
|
|
|
# Check if custom role has permissions admin doesn't have (elevated)
|
|
has_elevated_permissions = bool(role_permission_ids - admin_permission_codes)
|
|
|
|
# Only include non-elevated custom roles
|
|
if not has_elevated_permissions:
|
|
assignable_roles.append({
|
|
"id": str(role.id),
|
|
"code": role.code,
|
|
"name": role.name,
|
|
"description": role.description,
|
|
"is_system_role": role.is_system_role,
|
|
"created_at": role.created_at,
|
|
"updated_at": role.updated_at,
|
|
"permission_count": count
|
|
})
|
|
|
|
return assignable_roles
|
|
|
|
@api_router.post("/admin/roles", response_model=RoleResponse)
|
|
async def create_role(
|
|
request: CreateRoleRequest,
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Create a new custom role
|
|
Superadmin only
|
|
"""
|
|
# Check if role code already exists
|
|
existing_role = db.query(Role).filter(Role.code == request.code).first()
|
|
if existing_role:
|
|
raise HTTPException(
|
|
status_code=status.HTTP_400_BAD_REQUEST,
|
|
detail=f"Role with code '{request.code}' already exists"
|
|
)
|
|
|
|
# Create role
|
|
new_role = Role(
|
|
code=request.code.lower().strip(),
|
|
name=request.name.strip(),
|
|
description=request.description,
|
|
is_system_role=False, # Custom roles are never system roles
|
|
created_by=current_user.id
|
|
)
|
|
db.add(new_role)
|
|
db.flush() # Flush to get the role ID
|
|
|
|
# Assign permissions if provided
|
|
if request.permission_codes:
|
|
# Map role code to enum (for backward compatibility)
|
|
role_enum_map = {
|
|
'guest': UserRole.guest,
|
|
'member': UserRole.member,
|
|
'admin': UserRole.admin,
|
|
'superadmin': UserRole.superadmin
|
|
}
|
|
role_enum = role_enum_map.get(new_role.code, UserRole.guest)
|
|
|
|
for perm_code in request.permission_codes:
|
|
permission = db.query(Permission).filter(Permission.code == perm_code).first()
|
|
if permission:
|
|
role_perm = RolePermission(
|
|
role=role_enum, # Set legacy enum for backward compatibility
|
|
role_id=new_role.id,
|
|
permission_id=permission.id,
|
|
created_by=current_user.id
|
|
)
|
|
db.add(role_perm)
|
|
|
|
db.commit()
|
|
db.refresh(new_role)
|
|
|
|
# Get permission count
|
|
perm_count = db.query(RolePermission).filter(RolePermission.role_id == new_role.id).count()
|
|
|
|
return {
|
|
"id": str(new_role.id),
|
|
"code": new_role.code,
|
|
"name": new_role.name,
|
|
"description": new_role.description,
|
|
"is_system_role": new_role.is_system_role,
|
|
"created_at": new_role.created_at,
|
|
"updated_at": new_role.updated_at,
|
|
"permission_count": perm_count
|
|
}
|
|
|
|
@api_router.get("/admin/roles/{role_id}", response_model=RoleResponse)
|
|
async def get_role(
|
|
role_id: str,
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get role details by ID
|
|
Superadmin only
|
|
"""
|
|
role = db.query(Role).filter(Role.id == role_id).first()
|
|
if not role:
|
|
raise HTTPException(status_code=404, detail="Role not found")
|
|
|
|
perm_count = db.query(RolePermission).filter(RolePermission.role_id == role.id).count()
|
|
|
|
return {
|
|
"id": str(role.id),
|
|
"code": role.code,
|
|
"name": role.name,
|
|
"description": role.description,
|
|
"is_system_role": role.is_system_role,
|
|
"created_at": role.created_at,
|
|
"updated_at": role.updated_at,
|
|
"permission_count": perm_count
|
|
}
|
|
|
|
@api_router.put("/admin/roles/{role_id}", response_model=RoleResponse)
|
|
async def update_role(
|
|
role_id: str,
|
|
request: UpdateRoleRequest,
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Update role details (name, description)
|
|
Cannot update system roles or role code
|
|
Superadmin only
|
|
"""
|
|
role = db.query(Role).filter(Role.id == role_id).first()
|
|
if not role:
|
|
raise HTTPException(status_code=404, detail="Role not found")
|
|
|
|
if role.is_system_role:
|
|
raise HTTPException(
|
|
status_code=status.HTTP_403_FORBIDDEN,
|
|
detail="Cannot update system roles"
|
|
)
|
|
|
|
# Update fields
|
|
if request.name:
|
|
role.name = request.name.strip()
|
|
if request.description is not None:
|
|
role.description = request.description
|
|
|
|
role.updated_at = datetime.now(timezone.utc)
|
|
db.commit()
|
|
db.refresh(role)
|
|
|
|
perm_count = db.query(RolePermission).filter(RolePermission.role_id == role.id).count()
|
|
|
|
return {
|
|
"id": str(role.id),
|
|
"code": role.code,
|
|
"name": role.name,
|
|
"description": role.description,
|
|
"is_system_role": role.is_system_role,
|
|
"created_at": role.created_at,
|
|
"updated_at": role.updated_at,
|
|
"permission_count": perm_count
|
|
}
|
|
|
|
@api_router.delete("/admin/roles/{role_id}")
async def delete_role(
    role_id: str,
    current_user: User = Depends(get_current_superadmin),
    db: Session = Depends(get_db)
):
    """
    Delete a custom role
    Cannot delete system roles or roles assigned to users
    Superadmin only
    """
    role = db.query(Role).filter(Role.id == role_id).first()
    if not role:
        raise HTTPException(status_code=404, detail="Role not found")

    if role.is_system_role:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Cannot delete system roles"
        )

    # Check if any users have this role
    users_with_role = db.query(User).filter(User.role_id == role_id).count()
    if users_with_role > 0:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Cannot delete role: {users_with_role} user(s) are assigned this role"
        )

    # Capture the name before deleting; the ORM instance is expired once the commit runs
    role_name = role.name

    # Delete role permissions first (CASCADE should handle this, but being explicit)
    db.query(RolePermission).filter(RolePermission.role_id == role_id).delete()

    # Delete role
    db.delete(role)
    db.commit()

    return {"message": f"Role '{role_name}' deleted successfully"}

@api_router.get("/admin/roles/{role_id}/permissions")
|
|
async def get_role_permissions(
|
|
role_id: str,
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get all permissions assigned to a role
|
|
Superadmin only
|
|
"""
|
|
role = db.query(Role).filter(Role.id == role_id).first()
|
|
if not role:
|
|
raise HTTPException(status_code=404, detail="Role not found")
|
|
|
|
permissions = db.query(Permission)\
|
|
.join(RolePermission)\
|
|
.filter(RolePermission.role_id == role_id)\
|
|
.order_by(Permission.module, Permission.code)\
|
|
.all()
|
|
|
|
return {
|
|
"role_id": str(role.id),
|
|
"role_name": role.name,
|
|
"permissions": [
|
|
{
|
|
"id": str(perm.id),
|
|
"code": perm.code,
|
|
"name": perm.name,
|
|
"description": perm.description,
|
|
"module": perm.module
|
|
}
|
|
for perm in permissions
|
|
]
|
|
}
|
|
|
|
@api_router.put("/admin/roles/{role_id}/permissions")
|
|
async def assign_role_permissions(
|
|
role_id: str,
|
|
request: AssignRolePermissionsRequest,
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Assign permissions to a role (replaces existing permissions)
|
|
Superadmin only
|
|
"""
|
|
role = db.query(Role).filter(Role.id == role_id).first()
|
|
if not role:
|
|
raise HTTPException(status_code=404, detail="Role not found")
|
|
|
|
# Remove existing permissions
|
|
db.query(RolePermission).filter(RolePermission.role_id == role_id).delete()
|
|
|
|
# Map role code to enum (for backward compatibility with legacy role column)
|
|
role_enum_map = {
|
|
'guest': UserRole.guest,
|
|
'member': UserRole.member,
|
|
'admin': UserRole.admin,
|
|
'superadmin': UserRole.superadmin
|
|
}
|
|
role_enum = role_enum_map.get(role.code, UserRole.guest) # Default to guest if custom role
|
|
|
|
# Add new permissions
|
|
for perm_code in request.permission_codes:
|
|
permission = db.query(Permission).filter(Permission.code == perm_code).first()
|
|
if not permission:
|
|
logger.warning(f"Permission code '{perm_code}' not found, skipping")
|
|
continue
|
|
|
|
role_perm = RolePermission(
|
|
role=role_enum, # Set legacy enum for backward compatibility
|
|
role_id=role.id,
|
|
permission_id=permission.id,
|
|
created_by=current_user.id
|
|
)
|
|
db.add(role_perm)
|
|
|
|
db.commit()
|
|
|
|
return {
|
|
"message": f"Assigned {len(request.permission_codes)} permissions to role '{role.name}'",
|
|
"role_id": str(role.id),
|
|
"permission_codes": request.permission_codes
|
|
}
|
|
|
|
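# Illustrative request body for PUT /admin/roles/{role_id}/permissions (a sketch only;
# the permission codes below are placeholders and must match rows seeded into the
# permissions table):
#
#   {
#       "permission_codes": ["events.create", "events.update", "members.view"]
#   }
#
# Codes that do not exist are skipped with a warning rather than rejected, so the
# reported count reflects the submitted list, not the rows actually written.
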
# ============================================================
# Permission Management Endpoints (Superadmin Only)
# ============================================================

@api_router.get("/admin/permissions", response_model=List[PermissionResponse])
|
|
async def get_all_permissions(
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get all permissions in the system
|
|
Superadmin only
|
|
"""
|
|
permissions = db.query(Permission).order_by(Permission.module, Permission.code).all()
|
|
|
|
return [
|
|
{
|
|
"id": str(perm.id),
|
|
"code": perm.code,
|
|
"name": perm.name,
|
|
"description": perm.description,
|
|
"module": perm.module,
|
|
"created_at": perm.created_at
|
|
}
|
|
for perm in permissions
|
|
]
|
|
|
|
@api_router.get("/admin/permissions/modules")
|
|
async def get_permission_modules(
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get all permission modules with permission counts
|
|
Superadmin only
|
|
"""
|
|
from sqlalchemy import func
|
|
|
|
# Get all permissions grouped by module
|
|
modules = db.query(
|
|
Permission.module,
|
|
func.count(Permission.id).label('permission_count')
|
|
).group_by(Permission.module).all()
|
|
|
|
# Get permissions for each module
|
|
result = []
|
|
for module_name, count in modules:
|
|
permissions = db.query(Permission)\
|
|
.filter(Permission.module == module_name)\
|
|
.order_by(Permission.code)\
|
|
.all()
|
|
|
|
result.append({
|
|
"module": module_name,
|
|
"permission_count": count,
|
|
"permissions": [
|
|
{
|
|
"id": str(p.id),
|
|
"code": p.code,
|
|
"name": p.name,
|
|
"description": p.description
|
|
}
|
|
for p in permissions
|
|
]
|
|
})
|
|
|
|
return result
|
|
|
|
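# Illustrative response shape for GET /admin/permissions/modules (sketch; the module
# name and permission codes are placeholders):
#
#   [
#       {
#           "module": "events",
#           "permission_count": 2,
#           "permissions": [
#               {"id": "...", "code": "events.create", "name": "...", "description": "..."},
#               {"id": "...", "code": "events.update", "name": "...", "description": "..."}
#           ]
#       }
#   ]
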
@api_router.get("/admin/permissions/roles/{role}")
|
|
async def get_role_permissions(
|
|
role: str,
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get permissions assigned to a specific role
|
|
Superadmin only
|
|
"""
|
|
# Validate role exists
|
|
try:
|
|
role_enum = UserRole[role]
|
|
except KeyError:
|
|
raise HTTPException(status_code=400, detail=f"Invalid role: {role}")
|
|
|
|
# Superadmin always has all permissions (enforced in code, not stored)
|
|
if role_enum == UserRole.superadmin:
|
|
all_permissions = db.query(Permission).all()
|
|
return {
|
|
"role": role,
|
|
"permissions": [
|
|
{
|
|
"id": str(p.id),
|
|
"code": p.code,
|
|
"name": p.name,
|
|
"description": p.description,
|
|
"module": p.module
|
|
}
|
|
for p in all_permissions
|
|
],
|
|
"note": "Superadmin automatically has all permissions"
|
|
}
|
|
|
|
# Get permissions for other roles
|
|
permissions = db.query(Permission)\
|
|
.join(RolePermission)\
|
|
.filter(RolePermission.role == role_enum)\
|
|
.order_by(Permission.module, Permission.code)\
|
|
.all()
|
|
|
|
return {
|
|
"role": role,
|
|
"permissions": [
|
|
{
|
|
"id": str(p.id),
|
|
"code": p.code,
|
|
"name": p.name,
|
|
"description": p.description,
|
|
"module": p.module
|
|
}
|
|
for p in permissions
|
|
]
|
|
}
|
|
|
|
@api_router.put("/admin/permissions/roles/{role}")
|
|
async def assign_role_permissions(
|
|
role: str,
|
|
request: AssignPermissionsRequest,
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Assign permissions to a role (cherry-pick permissions)
|
|
Superadmin only
|
|
|
|
This replaces all existing permissions for the role with the provided list.
|
|
"""
|
|
# Validate role exists
|
|
try:
|
|
role_enum = UserRole[role]
|
|
except KeyError:
|
|
raise HTTPException(status_code=400, detail=f"Invalid role: {role}")
|
|
|
|
# Prevent modifying superadmin permissions
|
|
if role_enum == UserRole.superadmin:
|
|
raise HTTPException(
|
|
status_code=403,
|
|
detail="Cannot modify superadmin permissions. Superadmin always has all permissions."
|
|
)
|
|
|
|
# Validate all permission codes exist
|
|
permissions_to_assign = []
|
|
for code in request.permission_codes:
|
|
permission = db.query(Permission).filter(Permission.code == code).first()
|
|
if not permission:
|
|
raise HTTPException(status_code=400, detail=f"Invalid permission code: {code}")
|
|
permissions_to_assign.append(permission)
|
|
|
|
# Remove existing permissions for this role
|
|
db.query(RolePermission).filter(RolePermission.role == role_enum).delete()
|
|
|
|
# Add new permissions
|
|
for permission in permissions_to_assign:
|
|
role_permission = RolePermission(
|
|
role=role_enum,
|
|
permission_id=permission.id,
|
|
created_by=current_user.id
|
|
)
|
|
db.add(role_permission)
|
|
|
|
db.commit()
|
|
|
|
return {
|
|
"message": f"Successfully assigned {len(permissions_to_assign)} permissions to {role}",
|
|
"role": role,
|
|
"permission_count": len(permissions_to_assign)
|
|
}
|
|
|
|
@api_router.post("/admin/permissions/seed")
|
|
async def seed_permissions(
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Seed default permissions into the database
|
|
Superadmin only
|
|
|
|
WARNING: This will clear all existing permissions and role assignments
|
|
"""
|
|
import subprocess
|
|
import sys
|
|
|
|
try:
|
|
# Run the permissions_seed.py script
|
|
result = subprocess.run(
|
|
[sys.executable, "permissions_seed.py"],
|
|
capture_output=True,
|
|
text=True,
|
|
timeout=30
|
|
)
|
|
|
|
if result.returncode != 0:
|
|
raise HTTPException(
|
|
status_code=500,
|
|
detail=f"Failed to seed permissions: {result.stderr}"
|
|
)
|
|
|
|
return {
|
|
"message": "Permissions seeded successfully",
|
|
"output": result.stdout
|
|
}
|
|
|
|
except subprocess.TimeoutExpired:
|
|
raise HTTPException(status_code=500, detail="Permission seeding timed out")
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"Error seeding permissions: {str(e)}")
|
|
|
|
@api_router.post("/subscriptions/checkout")
|
|
async def create_checkout(
|
|
request: CheckoutRequest,
|
|
current_user: User = Depends(get_current_user),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Create Stripe Checkout session with dynamic pricing and donation tracking."""
|
|
|
|
# Status validation - only allow payment_pending and inactive users
|
|
allowed_statuses = [UserStatus.payment_pending, UserStatus.inactive]
|
|
if current_user.status not in allowed_statuses:
|
|
raise HTTPException(
|
|
status_code=403,
|
|
detail=f"Cannot proceed with payment. User status is '{current_user.status.value}'. "
|
|
f"Please complete email verification and admin approval first."
|
|
)
|
|
|
|
# Get plan
|
|
plan = db.query(SubscriptionPlan).filter(
|
|
SubscriptionPlan.id == request.plan_id
|
|
).first()
|
|
|
|
if not plan:
|
|
raise HTTPException(status_code=404, detail="Plan not found")
|
|
|
|
if not plan.active:
|
|
raise HTTPException(status_code=400, detail="This plan is no longer available for subscription")
|
|
|
|
# Validate amount against plan minimum
|
|
if request.amount_cents < plan.minimum_price_cents:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"Amount must be at least ${plan.minimum_price_cents / 100:.2f}"
|
|
)
|
|
|
|
# Calculate donation split
|
|
base_amount = plan.minimum_price_cents
|
|
donation_amount = request.amount_cents - base_amount
|
|
|
|
# Check if plan allows donations
|
|
if donation_amount > 0 and not plan.allow_donation:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail="This plan does not accept donations above the minimum price"
|
|
)
|
|
|
|
# Get frontend URL from env
|
|
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
|
|
|
|
try:
|
|
# Build line items for Stripe checkout
|
|
line_items = []
|
|
|
|
# Add base subscription line item with dynamic pricing
|
|
from payment_service import get_stripe_interval
|
|
stripe_interval = get_stripe_interval(plan.billing_cycle)
|
|
|
|
if stripe_interval: # Recurring subscription
|
|
line_items.append({
|
|
"price_data": {
|
|
"currency": "usd",
|
|
"unit_amount": base_amount,
|
|
"recurring": {"interval": stripe_interval},
|
|
"product_data": {
|
|
"name": plan.name,
|
|
"description": plan.description or f"{plan.name} membership"
|
|
}
|
|
},
|
|
"quantity": 1
|
|
})
|
|
else: # One-time payment (lifetime)
|
|
line_items.append({
|
|
"price_data": {
|
|
"currency": "usd",
|
|
"unit_amount": base_amount,
|
|
"product_data": {
|
|
"name": plan.name,
|
|
"description": plan.description or f"{plan.name} membership"
|
|
}
|
|
},
|
|
"quantity": 1
|
|
})
|
|
|
|
# Add donation line item if applicable
|
|
if donation_amount > 0:
|
|
line_items.append({
|
|
"price_data": {
|
|
"currency": "usd",
|
|
"unit_amount": donation_amount,
|
|
"product_data": {
|
|
"name": "Donation",
|
|
"description": f"Additional donation to support {plan.name}"
|
|
}
|
|
},
|
|
"quantity": 1
|
|
})
|
|
|
|
# Create Stripe Checkout Session
|
|
import stripe
|
|
# Try to get Stripe API key from database first, then fall back to environment
|
|
stripe_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
if not stripe_key:
|
|
stripe_key = os.getenv("STRIPE_SECRET_KEY")
|
|
|
|
if not stripe_key:
|
|
raise HTTPException(status_code=500, detail="Stripe API key not configured")
|
|
|
|
stripe.api_key = stripe_key
|
|
|
|
mode = "subscription" if stripe_interval else "payment"
|
|
|
|
session = stripe.checkout.Session.create(
|
|
customer_email=current_user.email,
|
|
payment_method_types=["card"],
|
|
line_items=line_items,
|
|
mode=mode,
|
|
success_url=f"{frontend_url}/payment-success?session_id={{CHECKOUT_SESSION_ID}}",
|
|
cancel_url=f"{frontend_url}/payment-cancel",
|
|
metadata={
|
|
"user_id": str(current_user.id),
|
|
"plan_id": str(plan.id),
|
|
"base_amount": str(base_amount),
|
|
"donation_amount": str(donation_amount),
|
|
"total_amount": str(request.amount_cents)
|
|
},
|
|
subscription_data={
|
|
"metadata": {
|
|
"user_id": str(current_user.id),
|
|
"plan_id": str(plan.id),
|
|
"base_amount": str(base_amount),
|
|
"donation_amount": str(donation_amount)
|
|
}
|
|
} if mode == "subscription" else None
|
|
)
|
|
|
|
return {"checkout_url": session.url}
|
|
|
|
except stripe.error.StripeError as e:
|
|
logger.error(f"Stripe error creating checkout session: {str(e)}")
|
|
raise HTTPException(status_code=500, detail=f"Payment processing error: {str(e)}")
|
|
except Exception as e:
|
|
logger.error(f"Error creating checkout session: {str(e)}")
|
|
raise HTTPException(status_code=500, detail="Failed to create checkout session")
|
|
|
|
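# Illustrative request body for POST /subscriptions/checkout (sketch; the plan id is a
# placeholder). With a plan whose minimum_price_cents is 5000, sending amount_cents=7500
# produces a 5000-cent base line item plus a 2500-cent donation line item:
#
#   {
#       "plan_id": "<subscription-plan-uuid>",
#       "amount_cents": 7500
#   }
#
# The response is {"checkout_url": "<Stripe-hosted checkout page>"}; the split is echoed
# back to the webhook via the session metadata (base_amount / donation_amount / total_amount).
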
@api_router.post("/donations/checkout")
|
|
async def create_donation_checkout(
|
|
request: DonationCheckoutRequest,
|
|
db: Session = Depends(get_db),
|
|
current_user: Optional[User] = Depends(lambda: None) # Optional authentication
|
|
):
|
|
"""Create Stripe Checkout session for one-time donation."""
|
|
|
|
# Get frontend URL from env
|
|
frontend_url = os.getenv("FRONTEND_URL", "http://localhost:3000")
|
|
|
|
# Check if user is authenticated (from header if present)
|
|
try:
|
|
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
|
from jose import jwt, JWTError
|
|
|
|
# Try to get token from request if available
|
|
# For now, we'll make this work for both authenticated and anonymous
|
|
pass
|
|
except:
|
|
pass
|
|
|
|
try:
|
|
# Create donation record first
|
|
donation = Donation(
|
|
amount_cents=request.amount_cents,
|
|
donation_type=DonationType.member if current_user else DonationType.public,
|
|
user_id=current_user.id if current_user else None,
|
|
donor_email=current_user.email if current_user else None,
|
|
donor_name=f"{current_user.first_name} {current_user.last_name}" if current_user else None,
|
|
status=DonationStatus.pending
|
|
)
|
|
db.add(donation)
|
|
db.commit()
|
|
db.refresh(donation)
|
|
|
|
# Create Stripe Checkout Session for one-time payment
|
|
import stripe
|
|
# Try to get Stripe API key from database first, then fall back to environment
|
|
stripe_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
if not stripe_key:
|
|
stripe_key = os.getenv("STRIPE_SECRET_KEY")
|
|
|
|
if not stripe_key:
|
|
raise HTTPException(status_code=500, detail="Stripe API key not configured")
|
|
|
|
stripe.api_key = stripe_key
|
|
|
|
checkout_session = stripe.checkout.Session.create(
|
|
payment_method_types=['card'],
|
|
line_items=[{
|
|
'price_data': {
|
|
'currency': 'usd',
|
|
'unit_amount': request.amount_cents,
|
|
'product_data': {
|
|
'name': 'Donation to LOAF',
|
|
'description': 'Thank you for supporting our community!'
|
|
}
|
|
},
|
|
'quantity': 1
|
|
}],
|
|
mode='payment', # One-time payment (not subscription)
|
|
success_url=f"{frontend_url}/donation-success?session_id={{CHECKOUT_SESSION_ID}}",
|
|
cancel_url=f"{frontend_url}/donate",
|
|
metadata={
|
|
'donation_id': str(donation.id),
|
|
'donation_type': donation.donation_type.value,
|
|
'user_id': str(current_user.id) if current_user else None
|
|
}
|
|
)
|
|
|
|
# Update donation with session ID
|
|
donation.stripe_checkout_session_id = checkout_session.id
|
|
db.commit()
|
|
|
|
logger.info(f"Donation checkout created: ${request.amount_cents/100:.2f} (ID: {donation.id})")
|
|
|
|
return {"checkout_url": checkout_session.url}
|
|
|
|
except stripe.error.StripeError as e:
|
|
db.rollback()
|
|
logger.error(f"Stripe error creating donation checkout: {str(e)}")
|
|
raise HTTPException(status_code=500, detail=f"Payment processing error: {str(e)}")
|
|
except Exception as e:
|
|
db.rollback()
|
|
logger.error(f"Error creating donation checkout: {str(e)}")
|
|
raise HTTPException(status_code=500, detail="Failed to create donation checkout")
|
|
|
|
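# Illustrative request body for POST /donations/checkout (sketch). Only the amount is
# required; donor email and name are filled in later from the Stripe session by the
# webhook handler:
#
#   {"amount_cents": 2500}
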
# ============================================================
# Payment Method Management API Endpoints
# ============================================================

class PaymentMethodResponse(BaseModel):
    id: str
    card_brand: Optional[str] = None
    card_last4: Optional[str] = None
    card_exp_month: Optional[int] = None
    card_exp_year: Optional[int] = None
    card_funding: Optional[str] = None
    payment_type: str
    is_default: bool
    is_manual: bool
    manual_notes: Optional[str] = None
    created_at: datetime

    model_config = {"from_attributes": True}


class PaymentMethodSaveRequest(BaseModel):
    stripe_payment_method_id: str
    set_as_default: bool = False


class AdminManualPaymentMethodRequest(BaseModel):
    payment_type: Literal["cash", "bank_transfer", "check"]
    manual_notes: Optional[str] = None
    set_as_default: bool = False


class AdminRevealRequest(BaseModel):
    password: str


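# Illustrative payloads for the request models above (sketches; identifiers are placeholders):
#
#   PaymentMethodSaveRequest        -> {"stripe_payment_method_id": "pm_...", "set_as_default": true}
#   AdminManualPaymentMethodRequest -> {"payment_type": "check", "manual_notes": "Check #1042", "set_as_default": false}
#   AdminRevealRequest              -> {"password": "<the requesting admin's own password>"}
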
def get_or_create_stripe_customer(user: User, db: Session) -> str:
    """Get existing or create new Stripe customer for user."""
    import stripe

    # Get Stripe API key
    stripe_key = get_setting(db, 'stripe_secret_key', decrypt=True)
    if not stripe_key:
        stripe_key = os.getenv("STRIPE_SECRET_KEY")
    if not stripe_key:
        raise HTTPException(status_code=500, detail="Stripe API key not configured")
    stripe.api_key = stripe_key

    if user.stripe_customer_id:
        # Verify customer still exists in Stripe
        try:
            customer = stripe.Customer.retrieve(user.stripe_customer_id)
            # Check if customer was deleted using getattr (Stripe SDK doesn't expose 'deleted' directly)
            if getattr(customer, 'deleted', False) or customer.get('deleted', False):
                # Customer was deleted, create a new one
                user.stripe_customer_id = None
            else:
                return user.stripe_customer_id
        except stripe.error.InvalidRequestError:
            # Customer doesn't exist, create a new one
            user.stripe_customer_id = None
        except Exception as e:
            logger.warning(f"Error retrieving Stripe customer {user.stripe_customer_id}: {str(e)}")
            user.stripe_customer_id = None

    # Create new Stripe customer
    customer = stripe.Customer.create(
        email=user.email,
        name=f"{user.first_name} {user.last_name}",
        metadata={"user_id": str(user.id)}
    )

    user.stripe_customer_id = customer.id
    db.commit()
    logger.info(f"Created Stripe customer {customer.id} for user {user.id}")

    return customer.id


@api_router.get("/payment-methods")
|
|
async def list_payment_methods(
|
|
current_user: User = Depends(get_current_user),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""List current user's saved payment methods."""
|
|
methods = db.query(PaymentMethod).filter(
|
|
PaymentMethod.user_id == current_user.id,
|
|
PaymentMethod.is_active == True
|
|
).order_by(PaymentMethod.is_default.desc(), PaymentMethod.created_at.desc()).all()
|
|
|
|
return [{
|
|
"id": str(m.id),
|
|
"card_brand": m.card_brand,
|
|
"card_last4": m.card_last4,
|
|
"card_exp_month": m.card_exp_month,
|
|
"card_exp_year": m.card_exp_year,
|
|
"card_funding": m.card_funding,
|
|
"payment_type": m.payment_type.value,
|
|
"is_default": m.is_default,
|
|
"is_manual": m.is_manual,
|
|
"manual_notes": m.manual_notes if m.is_manual else None,
|
|
"created_at": m.created_at.isoformat()
|
|
} for m in methods]
|
|
|
|
|
|
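# Card-saving flow implemented by the two endpoints below (a sketch of the intended
# sequence, assuming the standard Stripe.js confirmCardSetup API on the frontend):
#
#   1. POST /payment-methods/setup-intent            -> returns a SetupIntent client_secret
#   2. The frontend confirms the SetupIntent with Stripe.js using that client_secret,
#      which attaches the new card to the caller's Stripe customer.
#   3. POST /payment-methods with
#      {"stripe_payment_method_id": "pm_...", "set_as_default": true}
#      which verifies the card is attached to the caller's customer before saving it.
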
@api_router.post("/payment-methods/setup-intent")
|
|
async def create_setup_intent(
|
|
current_user: User = Depends(get_current_user),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Create a Stripe SetupIntent for adding a new payment method."""
|
|
import stripe
|
|
|
|
# Get or create Stripe customer
|
|
customer_id = get_or_create_stripe_customer(current_user, db)
|
|
|
|
# Get Stripe API key
|
|
stripe_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
if not stripe_key:
|
|
stripe_key = os.getenv("STRIPE_SECRET_KEY")
|
|
stripe.api_key = stripe_key
|
|
|
|
# Create SetupIntent
|
|
setup_intent = stripe.SetupIntent.create(
|
|
customer=customer_id,
|
|
payment_method_types=["card"],
|
|
metadata={"user_id": str(current_user.id)}
|
|
)
|
|
|
|
logger.info(f"Created SetupIntent for user {current_user.id}")
|
|
|
|
return {
|
|
"client_secret": setup_intent.client_secret,
|
|
"setup_intent_id": setup_intent.id
|
|
}
|
|
|
|
|
|
@api_router.post("/payment-methods")
|
|
async def save_payment_method(
|
|
request: PaymentMethodSaveRequest,
|
|
current_user: User = Depends(get_current_user),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Save a payment method after successful SetupIntent confirmation."""
|
|
import stripe
|
|
|
|
# Get Stripe API key
|
|
stripe_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
if not stripe_key:
|
|
stripe_key = os.getenv("STRIPE_SECRET_KEY")
|
|
stripe.api_key = stripe_key
|
|
|
|
# Refresh user from DB to get latest stripe_customer_id
|
|
db.refresh(current_user)
|
|
|
|
# Retrieve payment method from Stripe
|
|
try:
|
|
pm = stripe.PaymentMethod.retrieve(request.stripe_payment_method_id)
|
|
except stripe.error.InvalidRequestError as e:
|
|
logger.error(f"Invalid payment method ID: {request.stripe_payment_method_id}, error: {str(e)}")
|
|
raise HTTPException(status_code=400, detail="Invalid payment method ID")
|
|
|
|
# Verify ownership - payment method must be attached to user's customer
|
|
pm_customer = pm.customer if hasattr(pm, 'customer') else None
|
|
logger.info(f"Verifying PM ownership: pm.customer={pm_customer}, user.stripe_customer_id={current_user.stripe_customer_id}")
|
|
|
|
if not current_user.stripe_customer_id:
|
|
raise HTTPException(status_code=403, detail="User does not have a Stripe customer ID")
|
|
|
|
if not pm_customer:
|
|
raise HTTPException(status_code=403, detail="Payment method is not attached to any customer")
|
|
|
|
if pm_customer != current_user.stripe_customer_id:
|
|
raise HTTPException(status_code=403, detail="Payment method not owned by user")
|
|
|
|
# Check for duplicate
|
|
existing = db.query(PaymentMethod).filter(
|
|
PaymentMethod.stripe_payment_method_id == request.stripe_payment_method_id,
|
|
PaymentMethod.is_active == True
|
|
).first()
|
|
|
|
if existing:
|
|
raise HTTPException(status_code=400, detail="Payment method already saved")
|
|
|
|
# Handle default setting - unset others if setting this as default
|
|
if request.set_as_default:
|
|
db.query(PaymentMethod).filter(
|
|
PaymentMethod.user_id == current_user.id,
|
|
PaymentMethod.is_active == True
|
|
).update({"is_default": False})
|
|
|
|
# Extract card details
|
|
card = pm.card if pm.type == "card" else None
|
|
|
|
# Create payment method record
|
|
payment_method = PaymentMethod(
|
|
user_id=current_user.id,
|
|
stripe_payment_method_id=request.stripe_payment_method_id,
|
|
card_brand=card.brand if card else None,
|
|
card_last4=card.last4 if card else None,
|
|
card_exp_month=card.exp_month if card else None,
|
|
card_exp_year=card.exp_year if card else None,
|
|
card_funding=card.funding if card else None,
|
|
payment_type=PaymentMethodType.card,
|
|
is_default=request.set_as_default,
|
|
is_active=True,
|
|
is_manual=False
|
|
)
|
|
|
|
db.add(payment_method)
|
|
db.commit()
|
|
db.refresh(payment_method)
|
|
|
|
logger.info(f"Saved payment method {payment_method.id} for user {current_user.id}")
|
|
|
|
return {
|
|
"id": str(payment_method.id),
|
|
"card_brand": payment_method.card_brand,
|
|
"card_last4": payment_method.card_last4,
|
|
"card_exp_month": payment_method.card_exp_month,
|
|
"card_exp_year": payment_method.card_exp_year,
|
|
"is_default": payment_method.is_default,
|
|
"message": "Payment method saved successfully"
|
|
}
|
|
|
|
|
|
@api_router.put("/payment-methods/{payment_method_id}/default")
|
|
async def set_default_payment_method(
|
|
payment_method_id: str,
|
|
current_user: User = Depends(get_current_user),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Set a payment method as the default for auto-renewals."""
|
|
try:
|
|
pm_uuid = uuid.UUID(payment_method_id)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid payment method ID")
|
|
|
|
payment_method = db.query(PaymentMethod).filter(
|
|
PaymentMethod.id == pm_uuid,
|
|
PaymentMethod.user_id == current_user.id,
|
|
PaymentMethod.is_active == True
|
|
).first()
|
|
|
|
if not payment_method:
|
|
raise HTTPException(status_code=404, detail="Payment method not found")
|
|
|
|
# Unset all other defaults
|
|
db.query(PaymentMethod).filter(
|
|
PaymentMethod.user_id == current_user.id,
|
|
PaymentMethod.is_active == True
|
|
).update({"is_default": False})
|
|
|
|
# Set this one as default
|
|
payment_method.is_default = True
|
|
db.commit()
|
|
|
|
logger.info(f"Set default payment method {payment_method_id} for user {current_user.id}")
|
|
|
|
return {"message": "Default payment method updated", "id": str(payment_method.id)}
|
|
|
|
|
|
@api_router.delete("/payment-methods/{payment_method_id}")
|
|
async def delete_payment_method(
|
|
payment_method_id: str,
|
|
current_user: User = Depends(get_current_user),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Delete (soft-delete) a saved payment method."""
|
|
import stripe
|
|
|
|
try:
|
|
pm_uuid = uuid.UUID(payment_method_id)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid payment method ID")
|
|
|
|
payment_method = db.query(PaymentMethod).filter(
|
|
PaymentMethod.id == pm_uuid,
|
|
PaymentMethod.user_id == current_user.id,
|
|
PaymentMethod.is_active == True
|
|
).first()
|
|
|
|
if not payment_method:
|
|
raise HTTPException(status_code=404, detail="Payment method not found")
|
|
|
|
# Detach from Stripe if it's a Stripe payment method
|
|
if payment_method.stripe_payment_method_id:
|
|
try:
|
|
stripe_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
if not stripe_key:
|
|
stripe_key = os.getenv("STRIPE_SECRET_KEY")
|
|
stripe.api_key = stripe_key
|
|
|
|
stripe.PaymentMethod.detach(payment_method.stripe_payment_method_id)
|
|
logger.info(f"Detached Stripe payment method {payment_method.stripe_payment_method_id}")
|
|
except stripe.error.StripeError as e:
|
|
logger.warning(f"Failed to detach Stripe payment method: {str(e)}")
|
|
|
|
# Soft delete
|
|
payment_method.is_active = False
|
|
payment_method.is_default = False
|
|
db.commit()
|
|
|
|
logger.info(f"Deleted payment method {payment_method_id} for user {current_user.id}")
|
|
|
|
return {"message": "Payment method deleted"}
|
|
|
|
|
|
# ============================================================
|
|
# Admin Payment Method Management Endpoints
|
|
# ============================================================
|
|
|
|
@api_router.get("/admin/users/{user_id}/payment-methods")
|
|
async def admin_list_user_payment_methods(
|
|
user_id: str,
|
|
current_user: User = Depends(require_permission("payment_methods.view")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Admin: List a user's payment methods (masked)."""
|
|
try:
|
|
user_uuid = uuid.UUID(user_id)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid user ID")
|
|
|
|
user = db.query(User).filter(User.id == user_uuid).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
methods = db.query(PaymentMethod).filter(
|
|
PaymentMethod.user_id == user_uuid,
|
|
PaymentMethod.is_active == True
|
|
).order_by(PaymentMethod.is_default.desc(), PaymentMethod.created_at.desc()).all()
|
|
|
|
return [{
|
|
"id": str(m.id),
|
|
"card_brand": m.card_brand,
|
|
"card_last4": m.card_last4,
|
|
"card_exp_month": m.card_exp_month,
|
|
"card_exp_year": m.card_exp_year,
|
|
"card_funding": m.card_funding,
|
|
"payment_type": m.payment_type.value,
|
|
"is_default": m.is_default,
|
|
"is_manual": m.is_manual,
|
|
"manual_notes": m.manual_notes if m.is_manual else None,
|
|
"created_at": m.created_at.isoformat(),
|
|
# Sensitive data masked
|
|
"stripe_payment_method_id": None
|
|
} for m in methods]
|
|
|
|
|
|
@api_router.post("/admin/users/{user_id}/payment-methods/reveal")
|
|
async def admin_reveal_payment_details(
|
|
user_id: str,
|
|
request: AdminRevealRequest,
|
|
current_user: User = Depends(require_permission("payment_methods.view_sensitive")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Admin: Reveal full payment method details (requires password confirmation)."""
|
|
try:
|
|
user_uuid = uuid.UUID(user_id)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid user ID")
|
|
|
|
# Verify admin's password
|
|
if not verify_password(request.password, current_user.password_hash):
|
|
logger.warning(f"Admin {current_user.email} failed password verification for payment reveal")
|
|
raise HTTPException(status_code=401, detail="Invalid password")
|
|
|
|
user = db.query(User).filter(User.id == user_uuid).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
methods = db.query(PaymentMethod).filter(
|
|
PaymentMethod.user_id == user_uuid,
|
|
PaymentMethod.is_active == True
|
|
).order_by(PaymentMethod.is_default.desc(), PaymentMethod.created_at.desc()).all()
|
|
|
|
# Log sensitive access
|
|
logger.info(f"Admin {current_user.email} revealed payment details for user {user_id}")
|
|
|
|
return [{
|
|
"id": str(m.id),
|
|
"card_brand": m.card_brand,
|
|
"card_last4": m.card_last4,
|
|
"card_exp_month": m.card_exp_month,
|
|
"card_exp_year": m.card_exp_year,
|
|
"card_funding": m.card_funding,
|
|
"payment_type": m.payment_type.value,
|
|
"is_default": m.is_default,
|
|
"is_manual": m.is_manual,
|
|
"manual_notes": m.manual_notes,
|
|
"created_at": m.created_at.isoformat(),
|
|
"stripe_payment_method_id": m.stripe_payment_method_id
|
|
} for m in methods]
|
|
|
|
|
|
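# Illustrative request body for POST /admin/users/{user_id}/payment-methods/reveal
# (sketch): the admin re-enters their own password, and the sensitive access is
# written to the application log.
#
#   {"password": "<the requesting admin's password>"}
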
@api_router.post("/admin/users/{user_id}/payment-methods/setup-intent")
|
|
async def admin_create_setup_intent_for_user(
|
|
user_id: str,
|
|
current_user: User = Depends(require_permission("payment_methods.create")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Admin: Create a SetupIntent for adding a card on behalf of a user."""
|
|
import stripe
|
|
|
|
try:
|
|
user_uuid = uuid.UUID(user_id)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid user ID")
|
|
|
|
user = db.query(User).filter(User.id == user_uuid).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
# Get or create Stripe customer for the target user
|
|
customer_id = get_or_create_stripe_customer(user, db)
|
|
|
|
# Get Stripe API key
|
|
stripe_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
if not stripe_key:
|
|
stripe_key = os.getenv("STRIPE_SECRET_KEY")
|
|
stripe.api_key = stripe_key
|
|
|
|
# Create SetupIntent
|
|
setup_intent = stripe.SetupIntent.create(
|
|
customer=customer_id,
|
|
payment_method_types=["card"],
|
|
metadata={
|
|
"user_id": str(user.id),
|
|
"created_by_admin": str(current_user.id)
|
|
}
|
|
)
|
|
|
|
logger.info(f"Admin {current_user.email} created SetupIntent for user {user_id}")
|
|
|
|
return {
|
|
"client_secret": setup_intent.client_secret,
|
|
"setup_intent_id": setup_intent.id
|
|
}
|
|
|
|
|
|
@api_router.post("/admin/users/{user_id}/payment-methods")
|
|
async def admin_save_payment_method_for_user(
|
|
user_id: str,
|
|
request: PaymentMethodSaveRequest,
|
|
current_user: User = Depends(require_permission("payment_methods.create")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Admin: Save a payment method on behalf of a user."""
|
|
import stripe
|
|
|
|
try:
|
|
user_uuid = uuid.UUID(user_id)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid user ID")
|
|
|
|
user = db.query(User).filter(User.id == user_uuid).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
# Refresh user to get latest data
|
|
db.refresh(user)
|
|
|
|
# Get Stripe API key
|
|
stripe_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
if not stripe_key:
|
|
stripe_key = os.getenv("STRIPE_SECRET_KEY")
|
|
stripe.api_key = stripe_key
|
|
|
|
# Retrieve payment method from Stripe
|
|
try:
|
|
pm = stripe.PaymentMethod.retrieve(request.stripe_payment_method_id)
|
|
except stripe.error.InvalidRequestError as e:
|
|
logger.error(f"Invalid payment method ID: {request.stripe_payment_method_id}, error: {str(e)}")
|
|
raise HTTPException(status_code=400, detail="Invalid payment method ID")
|
|
|
|
# Verify ownership - payment method must be attached to user's customer
|
|
pm_customer = pm.customer if hasattr(pm, 'customer') else None
|
|
logger.info(f"Admin verifying PM ownership: pm.customer={pm_customer}, user.stripe_customer_id={user.stripe_customer_id}")
|
|
|
|
if not user.stripe_customer_id:
|
|
raise HTTPException(status_code=403, detail="User does not have a Stripe customer ID")
|
|
|
|
if not pm_customer:
|
|
raise HTTPException(status_code=403, detail="Payment method is not attached to any customer")
|
|
|
|
if pm_customer != user.stripe_customer_id:
|
|
raise HTTPException(status_code=403, detail="Payment method not attached to user's Stripe customer")
|
|
|
|
# Check for duplicate
|
|
existing = db.query(PaymentMethod).filter(
|
|
PaymentMethod.stripe_payment_method_id == request.stripe_payment_method_id,
|
|
PaymentMethod.is_active == True
|
|
).first()
|
|
|
|
if existing:
|
|
raise HTTPException(status_code=400, detail="Payment method already saved")
|
|
|
|
# Handle default setting
|
|
if request.set_as_default:
|
|
db.query(PaymentMethod).filter(
|
|
PaymentMethod.user_id == user.id,
|
|
PaymentMethod.is_active == True
|
|
).update({"is_default": False})
|
|
|
|
# Extract card details
|
|
card = pm.card if pm.type == "card" else None
|
|
|
|
# Create payment method record
|
|
payment_method = PaymentMethod(
|
|
user_id=user.id,
|
|
stripe_payment_method_id=request.stripe_payment_method_id,
|
|
card_brand=card.brand if card else None,
|
|
card_last4=card.last4 if card else None,
|
|
card_exp_month=card.exp_month if card else None,
|
|
card_exp_year=card.exp_year if card else None,
|
|
card_funding=card.funding if card else None,
|
|
payment_type=PaymentMethodType.card,
|
|
is_default=request.set_as_default,
|
|
is_active=True,
|
|
is_manual=False,
|
|
created_by=current_user.id
|
|
)
|
|
|
|
db.add(payment_method)
|
|
db.commit()
|
|
db.refresh(payment_method)
|
|
|
|
logger.info(f"Admin {current_user.email} saved payment method {payment_method.id} for user {user_id}")
|
|
|
|
return {
|
|
"id": str(payment_method.id),
|
|
"card_brand": payment_method.card_brand,
|
|
"card_last4": payment_method.card_last4,
|
|
"message": "Payment method saved successfully"
|
|
}
|
|
|
|
|
|
@api_router.post("/admin/users/{user_id}/payment-methods/manual")
|
|
async def admin_record_manual_payment_method(
|
|
user_id: str,
|
|
request: AdminManualPaymentMethodRequest,
|
|
current_user: User = Depends(require_permission("payment_methods.create")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Admin: Record a manual payment method (cash, check, bank transfer)."""
|
|
try:
|
|
user_uuid = uuid.UUID(user_id)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid user ID")
|
|
|
|
user = db.query(User).filter(User.id == user_uuid).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
# Map payment type string to enum
|
|
payment_type_map = {
|
|
"cash": PaymentMethodType.cash,
|
|
"bank_transfer": PaymentMethodType.bank_transfer,
|
|
"check": PaymentMethodType.check
|
|
}
|
|
payment_type = payment_type_map.get(request.payment_type)
|
|
if not payment_type:
|
|
raise HTTPException(status_code=400, detail="Invalid payment type")
|
|
|
|
# Handle default setting
|
|
if request.set_as_default:
|
|
db.query(PaymentMethod).filter(
|
|
PaymentMethod.user_id == user.id,
|
|
PaymentMethod.is_active == True
|
|
).update({"is_default": False})
|
|
|
|
# Create manual payment method record
|
|
payment_method = PaymentMethod(
|
|
user_id=user.id,
|
|
stripe_payment_method_id=None,
|
|
payment_type=payment_type,
|
|
is_default=request.set_as_default,
|
|
is_active=True,
|
|
is_manual=True,
|
|
manual_notes=request.manual_notes,
|
|
created_by=current_user.id
|
|
)
|
|
|
|
db.add(payment_method)
|
|
db.commit()
|
|
db.refresh(payment_method)
|
|
|
|
logger.info(f"Admin {current_user.email} recorded manual payment method {payment_method.id} ({payment_type.value}) for user {user_id}")
|
|
|
|
return {
|
|
"id": str(payment_method.id),
|
|
"payment_type": payment_method.payment_type.value,
|
|
"message": "Manual payment method recorded successfully"
|
|
}
|
|
|
|
|
|
@api_router.put("/admin/users/{user_id}/payment-methods/{payment_method_id}/default")
|
|
async def admin_set_default_payment_method(
|
|
user_id: str,
|
|
payment_method_id: str,
|
|
current_user: User = Depends(require_permission("payment_methods.set_default")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Admin: Set a user's payment method as default."""
|
|
try:
|
|
user_uuid = uuid.UUID(user_id)
|
|
pm_uuid = uuid.UUID(payment_method_id)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid ID format")
|
|
|
|
user = db.query(User).filter(User.id == user_uuid).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
payment_method = db.query(PaymentMethod).filter(
|
|
PaymentMethod.id == pm_uuid,
|
|
PaymentMethod.user_id == user_uuid,
|
|
PaymentMethod.is_active == True
|
|
).first()
|
|
|
|
if not payment_method:
|
|
raise HTTPException(status_code=404, detail="Payment method not found")
|
|
|
|
# Unset all other defaults
|
|
db.query(PaymentMethod).filter(
|
|
PaymentMethod.user_id == user_uuid,
|
|
PaymentMethod.is_active == True
|
|
).update({"is_default": False})
|
|
|
|
# Set this one as default
|
|
payment_method.is_default = True
|
|
db.commit()
|
|
|
|
logger.info(f"Admin {current_user.email} set default payment method {payment_method_id} for user {user_id}")
|
|
|
|
return {"message": "Default payment method updated", "id": str(payment_method.id)}
|
|
|
|
|
|
@api_router.delete("/admin/users/{user_id}/payment-methods/{payment_method_id}")
|
|
async def admin_delete_payment_method(
|
|
user_id: str,
|
|
payment_method_id: str,
|
|
current_user: User = Depends(require_permission("payment_methods.delete")),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Admin: Delete a user's payment method."""
|
|
import stripe
|
|
|
|
try:
|
|
user_uuid = uuid.UUID(user_id)
|
|
pm_uuid = uuid.UUID(payment_method_id)
|
|
except ValueError:
|
|
raise HTTPException(status_code=400, detail="Invalid ID format")
|
|
|
|
user = db.query(User).filter(User.id == user_uuid).first()
|
|
if not user:
|
|
raise HTTPException(status_code=404, detail="User not found")
|
|
|
|
payment_method = db.query(PaymentMethod).filter(
|
|
PaymentMethod.id == pm_uuid,
|
|
PaymentMethod.user_id == user_uuid,
|
|
PaymentMethod.is_active == True
|
|
).first()
|
|
|
|
if not payment_method:
|
|
raise HTTPException(status_code=404, detail="Payment method not found")
|
|
|
|
# Detach from Stripe if it's a Stripe payment method
|
|
if payment_method.stripe_payment_method_id:
|
|
try:
|
|
stripe_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
if not stripe_key:
|
|
stripe_key = os.getenv("STRIPE_SECRET_KEY")
|
|
stripe.api_key = stripe_key
|
|
|
|
stripe.PaymentMethod.detach(payment_method.stripe_payment_method_id)
|
|
logger.info(f"Detached Stripe payment method {payment_method.stripe_payment_method_id}")
|
|
except stripe.error.StripeError as e:
|
|
logger.warning(f"Failed to detach Stripe payment method: {str(e)}")
|
|
|
|
# Soft delete
|
|
payment_method.is_active = False
|
|
payment_method.is_default = False
|
|
db.commit()
|
|
|
|
logger.info(f"Admin {current_user.email} deleted payment method {payment_method_id} for user {user_id}")
|
|
|
|
return {"message": "Payment method deleted"}
|
|
|
|
|
|
@api_router.post("/contact")
|
|
async def submit_contact_form(
|
|
request: ContactFormRequest,
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""Handle contact form submission and send email to admin."""
|
|
|
|
try:
|
|
# Get admin email from environment or use default
|
|
admin_email = os.getenv("ADMIN_EMAIL", "info@loaftx.org")
|
|
|
|
# Create email content
|
|
subject = f"New Contact Form Submission: {request.subject}"
|
|
|
|
html_content = f"""
|
|
<!DOCTYPE html>
|
|
<html>
|
|
<head>
|
|
<style>
|
|
body {{ font-family: 'Nunito Sans', Arial, sans-serif; line-height: 1.6; color: #422268; }}
|
|
.container {{ max-width: 600px; margin: 0 auto; padding: 20px; }}
|
|
.header {{ background: linear-gradient(135deg, #644c9f 0%, #48286e 100%); padding: 30px; text-align: center; border-radius: 10px 10px 0 0; }}
|
|
.header h1 {{ color: white; margin: 0; font-family: 'Inter', sans-serif; }}
|
|
.content {{ background: #FFFFFF; padding: 30px; border: 1px solid #ddd8eb; border-radius: 0 0 10px 10px; }}
|
|
.field {{ margin-bottom: 20px; }}
|
|
.field-label {{ font-weight: 600; color: #48286e; margin-bottom: 5px; }}
|
|
.field-value {{ color: #664fa3; }}
|
|
.message-box {{ background: #f1eef9; padding: 20px; border-left: 4px solid #ff9e77; margin-top: 20px; }}
|
|
</style>
|
|
</head>
|
|
<body>
|
|
<div class="container">
|
|
<div class="header">
|
|
<h1>New Contact Form Submission</h1>
|
|
</div>
|
|
<div class="content">
|
|
<div class="field">
|
|
<div class="field-label">From:</div>
|
|
<div class="field-value">{request.first_name} {request.last_name}</div>
|
|
</div>
|
|
|
|
<div class="field">
|
|
<div class="field-label">Email:</div>
|
|
<div class="field-value">{request.email}</div>
|
|
</div>
|
|
|
|
<div class="field">
|
|
<div class="field-label">Subject:</div>
|
|
<div class="field-value">{request.subject}</div>
|
|
</div>
|
|
|
|
<div class="message-box">
|
|
<div class="field-label">Message:</div>
|
|
<div class="field-value" style="margin-top: 10px; white-space: pre-wrap;">{request.message}</div>
|
|
</div>
|
|
|
|
<p style="margin-top: 30px; padding-top: 20px; border-top: 1px solid #ddd8eb; color: #664fa3; font-size: 14px;">
|
|
Reply directly to this email to respond to {request.first_name}.
|
|
</p>
|
|
</div>
|
|
</div>
|
|
</body>
|
|
</html>
|
|
"""
|
|
|
|
# Import send_email from email_service
|
|
from email_service import send_email
|
|
|
|
# Send email to admin
|
|
email_sent = await send_email(admin_email, subject, html_content)
|
|
|
|
if not email_sent:
|
|
logger.error(f"Failed to send contact form email from {request.email}")
|
|
raise HTTPException(status_code=500, detail="Failed to send contact form. Please try again later.")
|
|
|
|
logger.info(f"Contact form submitted by {request.first_name} {request.last_name} ({request.email})")
|
|
|
|
return {
|
|
"message": "Contact form submitted successfully. We'll get back to you soon!",
|
|
"success": True
|
|
}
|
|
|
|
except HTTPException:
|
|
raise
|
|
except Exception as e:
|
|
logger.error(f"Error processing contact form: {str(e)}")
|
|
raise HTTPException(status_code=500, detail="Failed to process contact form")
|
|
|
|
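# Illustrative request body for POST /contact (sketch; field names follow the usage above,
# the sample values are placeholders):
#
#   {
#       "first_name": "Alex",
#       "last_name": "Rivera",
#       "email": "alex@example.com",
#       "subject": "Membership question",
#       "message": "Hello ..."
#   }
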
@app.post("/api/webhooks/stripe")
|
|
async def stripe_webhook(request: Request, db: Session = Depends(get_db)):
|
|
"""Handle Stripe webhook events. Note: This endpoint is NOT on the api_router to avoid /api/api prefix."""
|
|
|
|
# Get raw payload and signature
|
|
payload = await request.body()
|
|
sig_header = request.headers.get("stripe-signature")
|
|
|
|
if not sig_header:
|
|
raise HTTPException(status_code=400, detail="Missing stripe-signature header")
|
|
|
|
try:
|
|
# Verify webhook signature (pass db for reading webhook secret from database)
|
|
event = verify_webhook_signature(payload, sig_header, db)
|
|
except ValueError as e:
|
|
logger.error(f"Webhook signature verification failed: {str(e)}")
|
|
raise HTTPException(status_code=400, detail=str(e))
|
|
|
|
# Handle checkout.session.completed event
|
|
if event["type"] == "checkout.session.completed":
|
|
session = event["data"]["object"]
|
|
metadata = session.get("metadata", {})
|
|
|
|
# Check if this is a donation (has donation_id in metadata)
|
|
if "donation_id" in metadata:
|
|
donation_id = uuid.UUID(metadata["donation_id"])
|
|
donation = db.query(Donation).filter(Donation.id == donation_id).first()
|
|
|
|
if donation:
|
|
# Get Stripe API key from database
|
|
import stripe
|
|
stripe_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
if not stripe_key:
|
|
stripe_key = os.getenv("STRIPE_SECRET_KEY")
|
|
stripe.api_key = stripe_key
|
|
|
|
# Extract basic payment info
|
|
payment_intent_id = session.get('payment_intent')
|
|
donation.status = DonationStatus.completed
|
|
donation.stripe_payment_intent_id = payment_intent_id
|
|
donation.stripe_customer_id = session.get('customer')
|
|
donation.payment_method = 'card'
|
|
donation.payment_completed_at = datetime.fromtimestamp(session.get('created'), tz=timezone.utc)
|
|
|
|
# Capture donor email and name from Stripe session if not already set
|
|
if not donation.donor_email and session.get('customer_details'):
|
|
customer_details = session.get('customer_details')
|
|
donation.donor_email = customer_details.get('email')
|
|
if not donation.donor_name and customer_details.get('name'):
|
|
donation.donor_name = customer_details.get('name')
|
|
|
|
# Retrieve PaymentIntent to get charge details
|
|
try:
|
|
if payment_intent_id:
|
|
payment_intent = stripe.PaymentIntent.retrieve(payment_intent_id)
|
|
|
|
# Get charge ID from latest_charge
|
|
charge_id = payment_intent.latest_charge if hasattr(payment_intent, 'latest_charge') else None
|
|
|
|
if charge_id:
|
|
# Retrieve the charge to get full details
|
|
charge = stripe.Charge.retrieve(charge_id)
|
|
donation.stripe_charge_id = charge.id
|
|
donation.stripe_receipt_url = charge.receipt_url
|
|
|
|
# Get card details
|
|
if hasattr(charge, 'payment_method_details') and charge.payment_method_details and charge.payment_method_details.card:
|
|
card = charge.payment_method_details.card
|
|
donation.card_last4 = card.last4
|
|
donation.card_brand = card.brand.capitalize() # visa -> Visa
|
|
except Exception as e:
|
|
logger.error(f"Failed to retrieve Stripe payment details for donation: {str(e)}")
|
|
|
|
donation.updated_at = datetime.now(timezone.utc)
|
|
db.commit()
|
|
|
|
# Send thank you email only if donor_email exists
|
|
if donation.donor_email:
|
|
try:
|
|
from email_service import send_donation_thank_you_email
|
|
donor_first_name = donation.donor_name.split()[0] if donation.donor_name else "Friend"
|
|
await send_donation_thank_you_email(
|
|
donation.donor_email,
|
|
donor_first_name,
|
|
donation.amount_cents
|
|
)
|
|
except Exception as e:
|
|
logger.error(f"Failed to send donation thank you email: {str(e)}")
|
|
else:
|
|
logger.warning(f"Skipping thank you email for donation {donation.id}: no donor email")
|
|
|
|
logger.info(f"Donation completed: ${donation.amount_cents/100:.2f} (ID: {donation.id})")
|
|
else:
|
|
logger.error(f"Donation not found: {donation_id}")
|
|
|
|
# Otherwise handle subscription payment (existing logic)
|
|
else:
|
|
# Get metadata
|
|
user_id = metadata.get("user_id")
|
|
plan_id = metadata.get("plan_id")
|
|
base_amount = int(metadata.get("base_amount", 0))
|
|
donation_amount = int(metadata.get("donation_amount", 0))
|
|
total_amount = int(metadata.get("total_amount", session.get("amount_total", 0)))
|
|
|
|
if not user_id or not plan_id:
|
|
logger.error("Missing user_id or plan_id in webhook metadata")
|
|
return {"status": "error", "message": "Missing metadata"}
|
|
|
|
# Get user and plan
|
|
user = db.query(User).filter(User.id == user_id).first()
|
|
plan = db.query(SubscriptionPlan).filter(SubscriptionPlan.id == plan_id).first()
|
|
|
|
if user and plan:
|
|
# Check if subscription already exists (idempotency)
|
|
existing_subscription = db.query(Subscription).filter(
|
|
Subscription.stripe_subscription_id == session.get("subscription")
|
|
).first()
|
|
|
|
if not existing_subscription:
|
|
# Get Stripe API key from database
|
|
import stripe
|
|
stripe_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
if not stripe_key:
|
|
stripe_key = os.getenv("STRIPE_SECRET_KEY")
|
|
stripe.api_key = stripe_key
|
|
|
|
# Calculate subscription period using custom billing cycle if enabled
|
|
from payment_service import calculate_subscription_period
|
|
start_date, end_date = calculate_subscription_period(plan)
|
|
|
|
# Extract basic payment info
|
|
payment_intent_id = session.get('payment_intent')
|
|
subscription_id = session.get("subscription")
|
|
|
|
# Create subscription record with donation tracking
|
|
subscription = Subscription(
|
|
user_id=user.id,
|
|
plan_id=plan.id,
|
|
stripe_subscription_id=subscription_id,
|
|
stripe_customer_id=session.get("customer"),
|
|
status=SubscriptionStatus.active,
|
|
start_date=start_date,
|
|
end_date=end_date,
|
|
amount_paid_cents=total_amount,
|
|
base_subscription_cents=base_amount or plan.minimum_price_cents,
|
|
donation_cents=donation_amount,
|
|
payment_method="stripe",
|
|
stripe_payment_intent_id=payment_intent_id,
|
|
payment_completed_at=datetime.fromtimestamp(session.get('created'), tz=timezone.utc)
|
|
)
|
|
|
|
# Retrieve PaymentIntent and Subscription to get detailed transaction info
|
|
try:
|
|
if payment_intent_id:
|
|
payment_intent = stripe.PaymentIntent.retrieve(payment_intent_id)
|
|
|
|
# Get charge ID from latest_charge
|
|
charge_id = payment_intent.latest_charge if hasattr(payment_intent, 'latest_charge') else None
|
|
|
|
if charge_id:
|
|
# Retrieve the charge to get full details
|
|
charge = stripe.Charge.retrieve(charge_id)
|
|
subscription.stripe_charge_id = charge.id
|
|
subscription.stripe_receipt_url = charge.receipt_url
|
|
|
|
# Get card details
|
|
if hasattr(charge, 'payment_method_details') and charge.payment_method_details and charge.payment_method_details.card:
|
|
card = charge.payment_method_details.card
|
|
subscription.card_last4 = card.last4
|
|
subscription.card_brand = card.brand.capitalize() # visa -> Visa
|
|
|
|
# Get invoice ID from subscription
|
|
if subscription_id:
|
|
stripe_subscription = stripe.Subscription.retrieve(subscription_id)
|
|
if hasattr(stripe_subscription, 'latest_invoice') and stripe_subscription.latest_invoice:
|
|
subscription.stripe_invoice_id = stripe_subscription.latest_invoice
|
|
|
|
except Exception as e:
|
|
logger.error(f"Failed to retrieve Stripe payment details for subscription: {str(e)}")
|
|
|
|
db.add(subscription)
|
|
|
|
# Update user status and role
|
|
user.status = UserStatus.active
|
|
set_user_role(user, UserRole.member, db)
|
|
# Set member_since only if not already set (first time activation)
|
|
if not user.member_since:
|
|
user.member_since = datetime.now(timezone.utc)
|
|
user.updated_at = datetime.now(timezone.utc)
|
|
|
|
db.commit()
|
|
|
|
logger.info(
|
|
f"Subscription created for user {user.email}: "
|
|
f"${base_amount/100:.2f} base + ${donation_amount/100:.2f} donation = ${total_amount/100:.2f}"
|
|
)
|
|
else:
|
|
logger.info(f"Subscription already exists for session {session.get('id')}")
|
|
else:
|
|
logger.error(f"User or plan not found: user_id={user_id}, plan_id={plan_id}")
|
|
|
|
return {"status": "success"}
|
|
|
|
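# The webhook above distinguishes the two checkout flows purely by session metadata
# (a sketch of the shapes it expects, mirroring the checkout endpoints):
#
#   Donation checkout:      {"donation_id": "<uuid>", "donation_type": "public|member", "user_id": "<uuid or None>"}
#   Subscription checkout:  {"user_id": "<uuid>", "plan_id": "<uuid>", "base_amount": "5000",
#                            "donation_amount": "2500", "total_amount": "7500"}
#
# Subscription records are created idempotently: a session whose Stripe subscription id
# already exists is logged and skipped.
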
# ============================================================================
# ADMIN SETTINGS ENDPOINTS
# ============================================================================

# Helper functions for system settings
def get_setting(db: Session, key: str, decrypt: bool = False) -> str | None:
    """
    Get a system setting value from database.

    Args:
        db: Database session
        key: Setting key to retrieve
        decrypt: If True and setting_type is 'encrypted', decrypt the value

    Returns:
        Setting value or None if not found
    """
    from models import SystemSettings, SettingType
    from encryption_service import get_encryption_service

    setting = db.query(SystemSettings).filter(SystemSettings.setting_key == key).first()
    if not setting:
        return None

    value = setting.setting_value
    if decrypt and setting.setting_type == SettingType.encrypted and value:
        try:
            encryption_service = get_encryption_service()
            value = encryption_service.decrypt(value)
        except Exception as e:
            logger.error(f"Failed to decrypt setting {key}: {e}")
            return None

    return value


def set_setting(
    db: Session,
    key: str,
    value: str,
    user_id: str,
    setting_type: str = "plaintext",
    description: str = None,
    is_sensitive: bool = False,
    encrypt: bool = False
) -> None:
    """
    Set a system setting value in database.

    Args:
        db: Database session
        key: Setting key
        value: Setting value
        user_id: ID of user making the change
        setting_type: Type of setting (plaintext, encrypted, json)
        description: Human-readable description
        is_sensitive: Whether this is sensitive data
        encrypt: If True, encrypt the value before storing
    """
    from models import SystemSettings, SettingType
    from encryption_service import get_encryption_service

    # Encrypt value if requested
    if encrypt and value:
        encryption_service = get_encryption_service()
        value = encryption_service.encrypt(value)
        setting_type = "encrypted"

    # Find or create setting
    setting = db.query(SystemSettings).filter(SystemSettings.setting_key == key).first()

    if setting:
        # Update existing
        setting.setting_value = value
        setting.setting_type = SettingType[setting_type]
        setting.updated_by = user_id
        setting.updated_at = datetime.now(timezone.utc)
        if description:
            setting.description = description
        setting.is_sensitive = is_sensitive
    else:
        # Create new
        setting = SystemSettings(
            setting_key=key,
            setting_value=value,
            setting_type=SettingType[setting_type],
            description=description,
            updated_by=user_id,
            is_sensitive=is_sensitive
        )
        db.add(setting)

    db.commit()

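# A minimal usage sketch for the helpers above (not called anywhere; the key name and
# value are illustrative placeholders). Storing with encrypt=True marks the row as
# 'encrypted', and reading it back requires decrypt=True:
def _example_settings_round_trip(db: Session, superadmin_id: str) -> str | None:
    """Illustrative only: store an encrypted setting and read it back decrypted."""
    set_setting(
        db=db,
        key='example_api_key',        # placeholder key name
        value='sk_test_placeholder',  # placeholder secret
        user_id=superadmin_id,
        description='Example encrypted setting',
        is_sensitive=True,
        encrypt=True
    )
    return get_setting(db, 'example_api_key', decrypt=True)
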
@api_router.get("/admin/settings/stripe/status")
|
|
async def get_stripe_status(
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Get Stripe integration status (superadmin only).
|
|
|
|
Returns:
|
|
- configured: Whether credentials exist in database
|
|
- secret_key_prefix: First 10 chars of secret key (for verification)
|
|
- webhook_configured: Whether webhook secret exists
|
|
- environment: test or live (based on key prefix)
|
|
- webhook_url: Full webhook URL for Stripe configuration
|
|
"""
|
|
import os
|
|
|
|
# Read from database
|
|
secret_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
webhook_secret = get_setting(db, 'stripe_webhook_secret', decrypt=True)
|
|
|
|
configured = bool(secret_key)
|
|
environment = 'unknown'
|
|
|
|
if secret_key:
|
|
if secret_key.startswith('sk_test_'):
|
|
environment = 'test'
|
|
elif secret_key.startswith('sk_live_'):
|
|
environment = 'live'
|
|
|
|
# Get backend URL from environment for webhook URL
|
|
# Try multiple environment variable patterns for flexibility
|
|
backend_url = (
|
|
os.environ.get('BACKEND_URL') or
|
|
os.environ.get('API_URL') or
|
|
f"http://{os.environ.get('HOST', 'localhost')}:{os.environ.get('PORT', '8000')}"
|
|
)
|
|
webhook_url = f"{backend_url}/api/webhooks/stripe"
|
|
|
|
return {
|
|
"configured": configured,
|
|
"secret_key_prefix": secret_key[:10] if secret_key else None,
|
|
"secret_key_set": bool(secret_key),
|
|
"webhook_secret_set": bool(webhook_secret),
|
|
"environment": environment,
|
|
"webhook_url": webhook_url,
|
|
"instructions": {
|
|
"location": "Database (system_settings table)",
|
|
"required_settings": [
|
|
"stripe_secret_key (sk_test_... or sk_live_...)",
|
|
"stripe_webhook_secret (whsec_...)"
|
|
],
|
|
"restart_required": "No - changes take effect immediately"
|
|
}
|
|
}
|
|
|
|
@api_router.post("/admin/settings/stripe/test-connection")
|
|
async def test_stripe_connection(
|
|
current_user: User = Depends(get_current_superadmin),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""
|
|
Test Stripe API connection (superadmin only).
|
|
|
|
Performs a simple API call to verify credentials work.
|
|
"""
|
|
import stripe
|
|
|
|
# Read from database
|
|
secret_key = get_setting(db, 'stripe_secret_key', decrypt=True)
|
|
|
|
if not secret_key:
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail="STRIPE_SECRET_KEY not configured in database. Please configure Stripe settings first."
|
|
)
|
|
|
|
try:
|
|
stripe.api_key = secret_key
|
|
|
|
# Make a simple API call to test connection
|
|
balance = stripe.Balance.retrieve()
|
|
|
|
return {
|
|
"success": True,
|
|
"message": "Stripe connection successful",
|
|
"environment": "test" if secret_key.startswith('sk_test_') else "live",
|
|
"balance": {
|
|
"available": balance.available,
|
|
"pending": balance.pending
|
|
}
|
|
}
|
|
except stripe.error.AuthenticationError as e:
|
|
raise HTTPException(
|
|
status_code=401,
|
|
detail=f"Stripe authentication failed: {str(e)}"
|
|
)
|
|
except Exception as e:
|
|
raise HTTPException(
|
|
status_code=500,
|
|
detail=f"Stripe connection test failed: {str(e)}"
|
|
)
|
|
|
|
|
|
class UpdateStripeSettingsRequest(BaseModel):
    """Request model for updating Stripe settings"""
    secret_key: str = Field(..., min_length=1, description="Stripe secret key (sk_test_... or sk_live_...)")
    webhook_secret: str = Field(..., min_length=1, description="Stripe webhook secret (whsec_...)")

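# Example request body for the endpoint below (placeholder values):
#   {"secret_key": "sk_test_xxx", "webhook_secret": "whsec_xxx"}
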
@api_router.put("/admin/settings/stripe")
async def update_stripe_settings(
    request: UpdateStripeSettingsRequest,
    current_user: User = Depends(get_current_superadmin),
    db: Session = Depends(get_db)
):
    """
    Update Stripe integration settings (superadmin only).

    Stores Stripe credentials encrypted in the database.
    Changes take effect immediately without server restart.
    """
    # Validate secret key format
    if not (request.secret_key.startswith('sk_test_') or request.secret_key.startswith('sk_live_')):
        raise HTTPException(
            status_code=400,
            detail="Invalid Stripe secret key format. Must start with 'sk_test_' or 'sk_live_'"
        )

    # Validate webhook secret format
    if not request.webhook_secret.startswith('whsec_'):
        raise HTTPException(
            status_code=400,
            detail="Invalid Stripe webhook secret format. Must start with 'whsec_'"
        )

    try:
        # Store secret key (encrypted)
        set_setting(
            db=db,
            key='stripe_secret_key',
            value=request.secret_key,
            user_id=str(current_user.id),
            description='Stripe API secret key for payment processing',
            is_sensitive=True,
            encrypt=True
        )

        # Store webhook secret (encrypted)
        set_setting(
            db=db,
            key='stripe_webhook_secret',
            value=request.webhook_secret,
            user_id=str(current_user.id),
            description='Stripe webhook secret for verifying webhook signatures',
            is_sensitive=True,
            encrypt=True
        )

        # Determine environment
        environment = 'test' if request.secret_key.startswith('sk_test_') else 'live'

        return {
            "success": True,
            "message": "Stripe settings updated successfully",
            "environment": environment,
            "updated_at": datetime.now(timezone.utc).isoformat(),
            "updated_by": f"{current_user.first_name} {current_user.last_name}"
        }

    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to update Stripe settings: {str(e)}"
        )

# ============================================================================
# Member Tiers Settings
# ============================================================================

# Default tier configuration
DEFAULT_MEMBER_TIERS = {
    "tiers": [
        {
            "id": "new_member",
            "label": "New Member",
            "minYears": 0,
            "maxYears": 0.999,
            "iconKey": "sparkle",
            "badgeClass": "bg-blue-100 text-blue-800 border-blue-200"
        },
        {
            "id": "member_1_year",
            "label": "1 Year Member",
            "minYears": 1,
            "maxYears": 2.999,
            "iconKey": "star",
            "badgeClass": "bg-green-100 text-green-800 border-green-200"
        },
        {
            "id": "member_3_year",
            "label": "3+ Year Member",
            "minYears": 3,
            "maxYears": 4.999,
            "iconKey": "award",
            "badgeClass": "bg-purple-100 text-purple-800 border-purple-200"
        },
        {
            "id": "veteran",
            "label": "Veteran Member",
            "minYears": 5,
            "maxYears": 999,
            "iconKey": "crown",
            "badgeClass": "bg-amber-100 text-amber-800 border-amber-200"
        }
    ]
}

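# Illustrative helper (a sketch, not referenced by the endpoints below): shows
# how a tier can be resolved from this configuration for a member whose
# membership length in years is known.
def resolve_member_tier(years: float, config: dict = DEFAULT_MEMBER_TIERS) -> Optional[dict]:
    """Return the tier whose [minYears, maxYears] range contains `years`, or None."""
    for tier in config["tiers"]:
        if tier["minYears"] <= years <= tier["maxYears"]:
            return tier
    return None
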
class MemberTier(BaseModel):
    """Single tier definition"""
    id: str = Field(..., min_length=1, max_length=50)
    label: str = Field(..., min_length=1, max_length=100)
    minYears: float = Field(..., ge=0)
    maxYears: float = Field(..., gt=0)
    iconKey: str = Field(..., min_length=1, max_length=50)
    badgeClass: str = Field(..., min_length=1, max_length=200)


class MemberTiersConfig(BaseModel):
    """Member tiers configuration"""
    tiers: List[MemberTier] = Field(..., min_length=1, max_length=10)

    @validator('tiers')
    def validate_tiers_no_overlap(cls, tiers):
        """Ensure tiers are sorted and don't overlap"""
        sorted_tiers = sorted(tiers, key=lambda t: t.minYears)

        for i in range(len(sorted_tiers) - 1):
            current = sorted_tiers[i]
            next_tier = sorted_tiers[i + 1]
            if current.maxYears >= next_tier.minYears:
                raise ValueError(
                    f"Tier '{current.label}' (max: {current.maxYears}) overlaps with "
                    f"'{next_tier.label}' (min: {next_tier.minYears})"
                )

        return sorted_tiers

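# For example, a payload containing a tier with maxYears=3 followed by a tier
# with minYears=2 is rejected by the validator above, and FastAPI returns a
# 422 response describing the overlap.
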
@api_router.get("/settings/member-tiers")
async def get_member_tiers_public(
    current_user: User = Depends(get_active_member),
    db: Session = Depends(get_db)
):
    """
    Get member tier configuration (for active members/staff).

    Returns the tier definitions used to display membership badges.
    """
    import json

    tiers_json = get_setting(db, 'member_tiers')

    if tiers_json:
        try:
            return json.loads(tiers_json)
        except json.JSONDecodeError:
            # Fall back to default if stored JSON is invalid
            return DEFAULT_MEMBER_TIERS

    return DEFAULT_MEMBER_TIERS

@api_router.get("/admin/settings/member-tiers")
async def get_member_tiers_admin(
    current_user: User = Depends(get_current_admin_user),
    db: Session = Depends(get_db)
):
    """
    Get member tier configuration (admin view).

    Returns the tier definitions along with metadata about the last update.
    """
    import json

    tiers_json = get_setting(db, 'member_tiers')

    # Get the setting record for metadata
    setting = db.query(SystemSettings).filter(
        SystemSettings.setting_key == 'member_tiers'
    ).first()

    config = DEFAULT_MEMBER_TIERS
    if tiers_json:
        try:
            config = json.loads(tiers_json)
        except json.JSONDecodeError:
            pass

    return {
        "config": config,
        "is_default": tiers_json is None,
        "updated_at": setting.updated_at.isoformat() if setting else None,
        "updated_by": f"{setting.updater.first_name} {setting.updater.last_name}" if setting and setting.updater else None
    }

@api_router.put("/admin/settings/member-tiers")
async def update_member_tiers(
    request: MemberTiersConfig,
    current_user: User = Depends(get_current_admin_user),
    db: Session = Depends(get_db)
):
    """
    Update member tier configuration (admin only).

    Validates tier definitions to ensure:
    - No overlapping year ranges
    - All required fields present
    - Tiers are sorted by minYears
    """
    import json

    try:
        # Convert to dict for JSON storage
        tiers_dict = {"tiers": [tier.dict() for tier in request.tiers]}
        tiers_json = json.dumps(tiers_dict)

        # Store using set_setting helper
        set_setting(
            db=db,
            key='member_tiers',
            value=tiers_json,
            user_id=str(current_user.id),
            setting_type='json',
            description='Member tier badge configuration',
            is_sensitive=False
        )

        return {
            "success": True,
            "message": "Member tiers updated successfully",
            "config": tiers_dict,
            "updated_at": datetime.now(timezone.utc).isoformat(),
            "updated_by": f"{current_user.first_name} {current_user.last_name}"
        }

    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to update member tiers: {str(e)}"
        )

@api_router.post("/admin/settings/member-tiers/reset")
async def reset_member_tiers(
    current_user: User = Depends(get_current_superadmin),
    db: Session = Depends(get_db)
):
    """
    Reset member tiers to default configuration (superadmin only).
    """
    # Delete the setting to revert to defaults
    db.query(SystemSettings).filter(
        SystemSettings.setting_key == 'member_tiers'
    ).delete()
    db.commit()

    return {
        "success": True,
        "message": "Member tiers reset to defaults",
        "config": DEFAULT_MEMBER_TIERS
    }

# ============================================================================
# Theme Settings
# ============================================================================

# Default theme configuration
DEFAULT_THEME_CONFIG = {
    "site_name": "LOAF - Lesbians Over Age Fifty",
    "site_short_name": "LOAF",
    "site_description": "A community organization for lesbians over age fifty in Houston and surrounding areas.",
    "logo_url": None,
    "favicon_url": None,
    "colors": {
        "primary": "280 47% 27%",
        "primary_foreground": "0 0% 100%",
        "accent": "24 86% 55%",
        "brand_purple": "256 35% 47%",
        "brand_orange": "24 86% 55%",
        "brand_lavender": "262 46% 80%"
    },
    "meta_theme_color": "#664fa3"
}

# Simple in-memory cache for theme config
_theme_cache = {
    "config": None,
    "expires_at": None
}
THEME_CACHE_TTL_SECONDS = 300  # 5 minutes

def get_theme_config_cached(db: Session) -> dict:
    """Get theme config with caching."""
    import json

    now = datetime.now(timezone.utc)

    # Check cache
    if _theme_cache["config"] and _theme_cache["expires_at"] and _theme_cache["expires_at"] > now:
        return _theme_cache["config"]

    # Build config from settings
    config = dict(DEFAULT_THEME_CONFIG)

    # Fetch all theme.* settings
    theme_settings = db.query(SystemSettings).filter(
        SystemSettings.setting_key.like('theme.%')
    ).all()

    for setting in theme_settings:
        key = setting.setting_key.replace('theme.', '')
        value = setting.setting_value

        if key == 'colors' and value:
            try:
                config['colors'] = json.loads(value)
            except json.JSONDecodeError:
                pass
        elif key in config:
            config[key] = value

    # Update cache
    _theme_cache["config"] = config
    _theme_cache["expires_at"] = now + timedelta(seconds=THEME_CACHE_TTL_SECONDS)

    return config


def invalidate_theme_cache():
    """Invalidate the theme config cache."""
    _theme_cache["config"] = None
    _theme_cache["expires_at"] = None

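# Note: _theme_cache is a module-level dict, so the cache is per-process. When
# running multiple worker processes, each worker keeps its own copy;
# invalidate_theme_cache() only clears the worker that handled the update, and
# other workers refresh after at most THEME_CACHE_TTL_SECONDS.
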
@api_router.get("/config/theme")
async def get_theme_config(db: Session = Depends(get_db)):
    """
    Get public theme configuration.

    This endpoint is public (no authentication required) and returns
    the theme configuration for frontend initialization.

    Returns cached config with 5-minute TTL for performance.
    """
    return get_theme_config_cached(db)

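# Because this endpoint is unauthenticated, the frontend can fetch it before
# login. A minimal sketch (BASE_URL is a placeholder):
#
#   import requests
#   theme = requests.get(f"{BASE_URL}/api/config/theme").json()
#   print(theme["site_name"], theme["colors"]["primary"])
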
@api_router.get("/admin/settings/theme")
async def get_theme_settings_admin(
    current_user: User = Depends(require_permission("settings.view")),
    db: Session = Depends(get_db)
):
    """
    Get theme settings with metadata (admin view).

    Returns the full theme configuration along with:
    - Whether using default values
    - Last update timestamp
    - Who made the last update
    """
    import json

    config = dict(DEFAULT_THEME_CONFIG)
    is_default = True
    updated_at = None
    updated_by = None

    # Fetch all theme.* settings
    theme_settings = db.query(SystemSettings).filter(
        SystemSettings.setting_key.like('theme.%')
    ).all()

    if theme_settings:
        is_default = False

        # Find the most recent update
        latest_setting = max(theme_settings, key=lambda s: s.updated_at or s.created_at)
        updated_at = latest_setting.updated_at or latest_setting.created_at
        if latest_setting.updater:
            updated_by = f"{latest_setting.updater.first_name} {latest_setting.updater.last_name}"

    for setting in theme_settings:
        key = setting.setting_key.replace('theme.', '')
        value = setting.setting_value

        if key == 'colors' and value:
            try:
                config['colors'] = json.loads(value)
            except json.JSONDecodeError:
                pass
        elif key in config:
            config[key] = value

    return {
        "config": config,
        "is_default": is_default,
        "updated_at": updated_at.isoformat() if updated_at else None,
        "updated_by": updated_by
    }

class ThemeSettingsUpdate(BaseModel):
    """Request model for updating theme settings"""
    site_name: Optional[str] = Field(None, max_length=200, description="Full site name")
    site_short_name: Optional[str] = Field(None, max_length=50, description="Short name for PWA")
    site_description: Optional[str] = Field(None, max_length=500, description="Site description for SEO meta tag")
    colors: Optional[dict] = Field(None, description="Color scheme as HSL values")
    meta_theme_color: Optional[str] = Field(None, pattern=r'^#[0-9A-Fa-f]{6}$', description="PWA theme color (hex)")

@api_router.put("/admin/settings/theme")
async def update_theme_settings(
    request: ThemeSettingsUpdate,
    current_user: User = Depends(require_permission("settings.edit")),
    db: Session = Depends(get_db)
):
    """
    Update theme settings (admin only).

    Updates one or more theme settings. Only provided fields are updated.
    Changes are applied immediately and the cache is invalidated.
    """
    import json

    updates = {}

    if request.site_name is not None:
        set_setting(
            db=db,
            key='theme.site_name',
            value=request.site_name,
            user_id=str(current_user.id),
            setting_type='plaintext',
            description='Site name displayed in title and navigation',
            is_sensitive=False
        )
        updates['site_name'] = request.site_name

    if request.site_short_name is not None:
        set_setting(
            db=db,
            key='theme.site_short_name',
            value=request.site_short_name,
            user_id=str(current_user.id),
            setting_type='plaintext',
            description='Short site name for PWA manifest',
            is_sensitive=False
        )
        updates['site_short_name'] = request.site_short_name

    if request.site_description is not None:
        set_setting(
            db=db,
            key='theme.site_description',
            value=request.site_description,
            user_id=str(current_user.id),
            setting_type='plaintext',
            description='Site description for SEO meta tag',
            is_sensitive=False
        )
        updates['site_description'] = request.site_description

    if request.colors is not None:
        set_setting(
            db=db,
            key='theme.colors',
            value=json.dumps(request.colors),
            user_id=str(current_user.id),
            setting_type='json',
            description='Theme color scheme as HSL values',
            is_sensitive=False
        )
        updates['colors'] = request.colors

    if request.meta_theme_color is not None:
        set_setting(
            db=db,
            key='theme.meta_theme_color',
            value=request.meta_theme_color,
            user_id=str(current_user.id),
            setting_type='plaintext',
            description='PWA theme-color meta tag value',
            is_sensitive=False
        )
        updates['meta_theme_color'] = request.meta_theme_color

    # Invalidate cache
    invalidate_theme_cache()

    return {
        "success": True,
        "message": "Theme settings updated successfully",
        "updated_fields": list(updates.keys()),
        "updated_at": datetime.now(timezone.utc).isoformat(),
        "updated_by": f"{current_user.first_name} {current_user.last_name}"
    }

@api_router.post("/admin/settings/theme/logo")
async def upload_theme_logo(
    file: UploadFile = File(...),
    current_user: User = Depends(require_permission("settings.edit")),
    db: Session = Depends(get_db)
):
    """
    Upload organization logo (admin only).

    Accepts PNG, JPEG, WebP, or SVG images.
    Replaces any existing logo.
    """
    r2 = get_r2_storage()

    # Get current logo key for deletion
    old_logo_key = get_setting(db, 'theme.logo_key')

    # Delete old logo if it exists
    if old_logo_key:
        try:
            await r2.delete_file(old_logo_key)
        except Exception as e:
            logger.warning(f"Failed to delete old logo: {e}")

    # Upload new logo
    try:
        public_url, object_key, file_size = await r2.upload_file(
            file=file,
            folder="branding",
            allowed_types=r2.ALLOWED_BRANDING_TYPES,
            max_size_bytes=5 * 1024 * 1024  # 5MB limit for logos
        )

        # Store URL and key in settings
        set_setting(
            db=db,
            key='theme.logo_url',
            value=public_url,
            user_id=str(current_user.id),
            setting_type='plaintext',
            description='Organization logo URL',
            is_sensitive=False
        )

        set_setting(
            db=db,
            key='theme.logo_key',
            value=object_key,
            user_id=str(current_user.id),
            setting_type='plaintext',
            description='R2 object key for logo (for deletion)',
            is_sensitive=False
        )

        # Invalidate cache
        invalidate_theme_cache()

        return {
            "success": True,
            "message": "Logo uploaded successfully",
            "logo_url": public_url,
            "file_size": file_size
        }

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to upload logo: {str(e)}"
        )

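# Minimal sketch of uploading a logo from a script. BASE_URL and TOKEN are
# placeholders; the endpoint expects multipart/form-data with a `file` field:
#
#   import requests
#   with open("logo.png", "rb") as fh:
#       resp = requests.post(
#           f"{BASE_URL}/api/admin/settings/theme/logo",
#           headers={"Authorization": f"Bearer {TOKEN}"},
#           files={"file": ("logo.png", fh, "image/png")},
#       )
#   print(resp.json())
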
@api_router.post("/admin/settings/theme/favicon")
async def upload_theme_favicon(
    file: UploadFile = File(...),
    current_user: User = Depends(require_permission("settings.edit")),
    db: Session = Depends(get_db)
):
    """
    Upload site favicon (admin only).

    Accepts ICO, PNG, or SVG images.
    Replaces any existing favicon.
    """
    r2 = get_r2_storage()

    # Get current favicon key for deletion
    old_favicon_key = get_setting(db, 'theme.favicon_key')

    # Delete old favicon if it exists
    if old_favicon_key:
        try:
            await r2.delete_file(old_favicon_key)
        except Exception as e:
            logger.warning(f"Failed to delete old favicon: {e}")

    # Upload new favicon
    try:
        public_url, object_key, file_size = await r2.upload_file(
            file=file,
            folder="branding",
            allowed_types=r2.ALLOWED_FAVICON_TYPES,
            max_size_bytes=1 * 1024 * 1024  # 1MB limit for favicons
        )

        # Store URL and key in settings
        set_setting(
            db=db,
            key='theme.favicon_url',
            value=public_url,
            user_id=str(current_user.id),
            setting_type='plaintext',
            description='Site favicon URL',
            is_sensitive=False
        )

        set_setting(
            db=db,
            key='theme.favicon_key',
            value=object_key,
            user_id=str(current_user.id),
            setting_type='plaintext',
            description='R2 object key for favicon (for deletion)',
            is_sensitive=False
        )

        # Invalidate cache
        invalidate_theme_cache()

        return {
            "success": True,
            "message": "Favicon uploaded successfully",
            "favicon_url": public_url,
            "file_size": file_size
        }

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to upload favicon: {str(e)}"
        )

@api_router.delete("/admin/settings/theme/logo")
async def delete_theme_logo(
    current_user: User = Depends(require_permission("settings.edit")),
    db: Session = Depends(get_db)
):
    """
    Delete organization logo (admin only).

    Removes the logo from R2 storage and clears the settings,
    reverting to the default logo.
    """
    r2 = get_r2_storage()

    # Get current logo key for deletion
    logo_key = get_setting(db, 'theme.logo_key')

    if logo_key:
        try:
            await r2.delete_file(logo_key)
        except Exception as e:
            logger.warning(f"Failed to delete logo from R2: {e}")

    # Delete the settings
    db.query(SystemSettings).filter(
        SystemSettings.setting_key.in_(['theme.logo_url', 'theme.logo_key'])
    ).delete(synchronize_session=False)
    db.commit()

    # Invalidate cache
    invalidate_theme_cache()

    return {
        "success": True,
        "message": "Logo deleted successfully"
    }

@api_router.delete("/admin/settings/theme/favicon")
async def delete_theme_favicon(
    current_user: User = Depends(require_permission("settings.edit")),
    db: Session = Depends(get_db)
):
    """
    Delete site favicon (admin only).

    Removes the favicon from R2 storage and clears the settings,
    reverting to the default favicon.
    """
    r2 = get_r2_storage()

    # Get current favicon key for deletion
    favicon_key = get_setting(db, 'theme.favicon_key')

    if favicon_key:
        try:
            await r2.delete_file(favicon_key)
        except Exception as e:
            logger.warning(f"Failed to delete favicon from R2: {e}")

    # Delete the settings
    db.query(SystemSettings).filter(
        SystemSettings.setting_key.in_(['theme.favicon_url', 'theme.favicon_key'])
    ).delete(synchronize_session=False)
    db.commit()

    # Invalidate cache
    invalidate_theme_cache()

    return {
        "success": True,
        "message": "Favicon deleted successfully"
    }

@api_router.post("/admin/settings/theme/reset")
async def reset_theme_settings(
    current_user: User = Depends(get_current_superadmin),
    db: Session = Depends(get_db)
):
    """
    Reset all theme settings to defaults (superadmin only).

    Deletes all theme.* settings from the database and removes
    any uploaded logo/favicon from R2 storage.
    """
    r2 = get_r2_storage()

    # Get keys for uploaded files
    logo_key = get_setting(db, 'theme.logo_key')
    favicon_key = get_setting(db, 'theme.favicon_key')

    # Delete files from R2
    if logo_key:
        try:
            await r2.delete_file(logo_key)
        except Exception as e:
            logger.warning(f"Failed to delete logo from R2: {e}")

    if favicon_key:
        try:
            await r2.delete_file(favicon_key)
        except Exception as e:
            logger.warning(f"Failed to delete favicon from R2: {e}")

    # Delete all theme settings
    deleted_count = db.query(SystemSettings).filter(
        SystemSettings.setting_key.like('theme.%')
    ).delete(synchronize_session=False)
    db.commit()

    # Invalidate cache
    invalidate_theme_cache()

    return {
        "success": True,
        "message": "Theme settings reset to defaults",
        "deleted_settings_count": deleted_count,
        "config": DEFAULT_THEME_CONFIG
    }

# Include the router in the main app
app.include_router(api_router)

# ============================================================================
# MIDDLEWARE CONFIGURATION
# ============================================================================
# IMPORTANT: In FastAPI, middleware is executed in REVERSE order of addition
# Last added = First executed
# So we add them in this order: Security Headers -> CORS
# Execution order will be: CORS -> Security Headers

# Security Headers Middleware (Added first, executes second)
@app.middleware("http")
async def add_security_headers(request: Request, call_next):
    response = await call_next(request)

    # Security headers to protect against common vulnerabilities
    security_headers = {
        # Prevent clickjacking attacks
        "X-Frame-Options": "DENY",

        # Prevent MIME type sniffing
        "X-Content-Type-Options": "nosniff",

        # Enable XSS protection in older browsers
        "X-XSS-Protection": "1; mode=block",

        # Control referrer information
        "Referrer-Policy": "strict-origin-when-cross-origin",

        # Permissions policy (formerly Feature-Policy)
        "Permissions-Policy": "geolocation=(), microphone=(), camera=()",
    }

    # Add HSTS header in production (force HTTPS)
    if IS_PRODUCTION:
        security_headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains"

    # Apply all security headers
    for header, value in security_headers.items():
        response.headers[header] = value

    # Remove server identification headers (use del, not pop for MutableHeaders)
    if "Server" in response.headers:
        del response.headers["Server"]

    return response

print(f"✓ Security headers configured (Production: {IS_PRODUCTION})")

# CORS Configuration (Added second, executes first)
cors_origins = os.environ.get('CORS_ORIGINS', '')
if cors_origins:
    # Use explicitly configured origins
    allowed_origins = [origin.strip() for origin in cors_origins.split(',')]
else:
    # Default to common development origins if not configured
    allowed_origins = [
        "http://localhost:3000",
        "http://localhost:8000",
        "http://127.0.0.1:3000",
        "http://127.0.0.1:8000"
    ]
    print(f"⚠️ WARNING: CORS_ORIGINS not set. Using defaults: {allowed_origins}")
    print("⚠️ For production, set CORS_ORIGINS in .env file!")

print(f"✓ CORS allowed origins: {allowed_origins}")

app.add_middleware(
    CORSMiddleware,
    allow_credentials=True,
    allow_origins=allowed_origins,
    allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS", "PATCH"],
    allow_headers=["*"],
    expose_headers=["*"],
    max_age=600,  # Cache preflight requests for 10 minutes
)
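
# Example .env entry (illustrative origins; adjust for your deployment):
#   CORS_ORIGINS=https://members.example.org,https://admin.example.org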