Alembic experiments

Jonas Linter
2025-11-18 11:04:38 +01:00
parent 10dcbae5ad
commit 5a660507d2
17 changed files with 1716 additions and 99 deletions


@@ -44,13 +44,13 @@ from .const import CONF_GOOGLE_ACCOUNT, CONF_HOTEL_ID, CONF_META_ACCOUNT, HttpSt
 from .conversion_service import ConversionService
 from .csv_import import CSVImporter
 from .customer_service import CustomerService
-from .db import Base, ResilientAsyncSession, SessionMaker, create_database_engine
+from .db import ResilientAsyncSession, SessionMaker, create_database_engine
 from .db import Customer as DBCustomer
 from .db import Reservation as DBReservation
+from .db_setup import run_startup_tasks
 from .email_monitoring import ReservationStatsCollector
 from .email_service import create_email_service
 from .logging_config import get_logger, setup_logging
-from .migrations import run_all_migrations
 from .pushover_service import create_pushover_service
 from .rate_limit import (
     BURST_RATE_LIMIT,
@@ -331,31 +331,15 @@ async def lifespan(app: FastAPI):
     elif hotel_id and not push_endpoint:
         _LOGGER.info("Hotel %s has no push_endpoint configured", hotel_id)

-    # Create tables first (all workers)
-    # This ensures tables exist before migrations try to alter them
-    async with engine.begin() as conn:
-        await conn.run_sync(Base.metadata.create_all)
-    _LOGGER.info("Database tables checked/created at startup.")
-
-    # Run migrations after tables exist (only primary worker for race conditions)
+    # Run startup tasks (only in primary worker to avoid race conditions)
+    # NOTE: Database migrations should already have been run before the app started
+    # via run_migrations.py or `uv run alembic upgrade head`
     if is_primary:
-        await run_all_migrations(engine, config)
+        _LOGGER.info("Running startup tasks (primary worker)...")
+        await run_startup_tasks(AsyncSessionLocal, config)
+        _LOGGER.info("Startup tasks completed")
     else:
-        _LOGGER.info("Skipping migrations (non-primary worker)")
-
-    # Hash any existing customers (only in primary worker to avoid race conditions)
-    if is_primary:
-        async with AsyncSessionLocal() as session:
-            customer_service = CustomerService(session)
-            hashed_count = await customer_service.hash_existing_customers()
-            if hashed_count > 0:
-                _LOGGER.info(
-                    "Backfilled hashed data for %d existing customers", hashed_count
-                )
-            else:
-                _LOGGER.info("All existing customers already have hashed data")
-    else:
-        _LOGGER.info("Skipping customer hashing (non-primary worker)")
+        _LOGGER.info("Skipping startup tasks (non-primary worker)")

     # Initialize and hook up stats collector for daily reports
     # Note: report_scheduler will only exist on the primary worker
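
The NOTE added above shifts schema management out of the app: migrations now run before startup, via run_migrations.py or `uv run alembic upgrade head`, instead of Base.metadata.create_all plus run_all_migrations inside lifespan(). As a rough sketch of what such a pre-start runner can look like (the actual run_migrations.py in this commit may differ), Alembic can be invoked programmatically:

    from alembic import command
    from alembic.config import Config

    def main() -> None:
        # Load the project's Alembic configuration (file name assumed).
        cfg = Config("alembic.ini")
        # Apply all pending migrations up to the latest revision;
        # equivalent to `uv run alembic upgrade head` on the CLI.
        command.upgrade(cfg, "head")

    if __name__ == "__main__":
        main()

Running this once before the workers start replaces the old pattern where every worker called create_all and the primary worker then ran the homegrown migrations.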
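
The customer-hashing backfill deleted from lifespan() presumably moves into db_setup.run_startup_tasks, which the primary worker now awaits with (AsyncSessionLocal, config). A minimal sketch, assuming run_startup_tasks simply absorbs the removed block and reuses CustomerService and _LOGGER from the importing module (the signature is guessed from the call site; the real db_setup module may do more):

    async def run_startup_tasks(session_factory, config) -> None:
        # Backfill hashed data for customers created before hashing existed.
        async with session_factory() as session:
            customer_service = CustomerService(session)
            hashed_count = await customer_service.hash_existing_customers()
            if hashed_count > 0:
                _LOGGER.info(
                    "Backfilled hashed data for %d existing customers", hashed_count
                )
            else:
                _LOGGER.info("All existing customers already have hashed data")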