Add Alembic migration files to the Docker image, move conversion XML handling to a background task, and make run_api's uvicorn settings configurable via CLI arguments

This commit is contained in:
Jonas Linter
2025-11-18 16:10:57 +01:00
parent ccdc66fb9b
commit 433026dd01
6 changed files with 129551 additions and 65 deletions

View File

@@ -33,6 +33,10 @@ COPY --from=builder /app/.venv /app/.venv
# Copy application code # Copy application code
COPY src/ ./src/ COPY src/ ./src/
# Copy Alembic files for database migrations
COPY alembic.ini ./
COPY alembic/ ./alembic/
# Create directories and set permissions # Create directories and set permissions
RUN mkdir -p /app/logs && \ RUN mkdir -p /app/logs && \
chown -R appuser:appuser /app chown -R appuser:appuser /app
@@ -53,9 +57,8 @@ EXPOSE 8000
HEALTHCHECK --interval=120s --timeout=10s --start-period=60s --retries=3 \ HEALTHCHECK --interval=120s --timeout=10s --start-period=60s --retries=3 \
CMD python -c "import requests; requests.get('http://localhost:8000/api/health', timeout=5)" CMD python -c "import requests; requests.get('http://localhost:8000/api/health', timeout=5)"
# Run the application with uvicorn # Run the application with run_api.py (includes migrations)
WORKDIR /app/src CMD python -m alpine_bits_python.run_api \
CMD uvicorn alpine_bits_python.api:app \
--host 0.0.0.0 \ --host 0.0.0.0 \
--port 8000 \ --port 8000 \
--workers 4 \ --workers 4 \

File diff suppressed because it is too large Load Diff

View File

@@ -16,7 +16,7 @@ from typing import Any
import httpx import httpx
from fast_langdetect import detect from fast_langdetect import detect
from fastapi import APIRouter, Depends, FastAPI, HTTPException, Request from fastapi import APIRouter, BackgroundTasks, Depends, FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import HTMLResponse, Response from fastapi.responses import HTMLResponse, Response
from fastapi.security import ( from fastapi.security import (
@@ -44,9 +44,9 @@ from .const import CONF_GOOGLE_ACCOUNT, CONF_HOTEL_ID, CONF_META_ACCOUNT, HttpSt
from .conversion_service import ConversionService from .conversion_service import ConversionService
from .csv_import import CSVImporter from .csv_import import CSVImporter
from .customer_service import CustomerService from .customer_service import CustomerService
from .db import ResilientAsyncSession, SessionMaker, create_database_engine
from .db import Customer as DBCustomer from .db import Customer as DBCustomer
from .db import Reservation as DBReservation from .db import Reservation as DBReservation
from .db import ResilientAsyncSession, SessionMaker, create_database_engine
from .db_setup import run_startup_tasks from .db_setup import run_startup_tasks
from .email_monitoring import ReservationStatsCollector from .email_monitoring import ReservationStatsCollector
from .email_service import create_email_service from .email_service import create_email_service
@@ -1164,6 +1164,7 @@ async def import_csv_endpoint(
Returns: Returns:
Import statistics including created/skipped counts and any errors Import statistics including created/skipped counts and any errors
""" """
try: try:
# Validate file path to prevent path traversal # Validate file path to prevent path traversal
@@ -1202,7 +1203,7 @@ async def import_csv_endpoint(
raise HTTPException(status_code=404, detail=str(e)) raise HTTPException(status_code=404, detail=str(e))
except Exception as e: except Exception as e:
_LOGGER.exception("Error during CSV import") _LOGGER.exception("Error during CSV import")
raise HTTPException(status_code=500, detail=f"Error processing CSV: {str(e)}") raise HTTPException(status_code=500, detail=f"Error processing CSV: {e!s}")
@api_router.post("/webhook/generic") @api_router.post("/webhook/generic")
@@ -1315,10 +1316,56 @@ async def handle_generic_webhook(
) from e ) from e
async def _process_conversion_xml_background(
    xml_content: str,
    filename: str,
    session_maker: SessionMaker,
    log_filename: Path,
) -> None:
    """Background task to process conversion XML.

    This runs in a separate asyncio task after the HTTP response is sent.
    Handles both file prettification and database processing.

    Args:
        xml_content: Decoded XML payload to persist and ingest.
        filename: Original upload filename; used only in log messages.
        session_maker: Session factory handed to ConversionService for
            database work.
        log_filename: Destination path where the XML file is saved.
    """
    try:
        # First, prettify and save the XML file (in background)
        try:
            dom = xml.dom.minidom.parseString(xml_content)
            pretty_xml = dom.toprettyxml(indent=" ")
            # Remove extra blank lines that toprettyxml adds
            pretty_xml = "\n".join(
                [line for line in pretty_xml.split("\n") if line.strip()]
            )
            # Offload file I/O to a worker thread so the event loop is not blocked
            await asyncio.to_thread(
                log_filename.write_text, pretty_xml, encoding="utf-8"
            )
            _LOGGER.debug("XML file prettified and saved to %s", log_filename)
        except Exception as e:
            # If formatting fails, save the original content
            _LOGGER.warning("Failed to format XML: %s. Saving unformatted.", str(e))
            await asyncio.to_thread(
                log_filename.write_text, xml_content, encoding="utf-8"
            )
        # Now process the conversion XML
        _LOGGER.info("Starting database processing of %s", filename)
        conversion_service = ConversionService(session_maker)
        processing_stats = await conversion_service.process_conversion_xml(xml_content)
        _LOGGER.info(
            "Conversion processing complete for %s: %s", filename, processing_stats
        )
    except Exception:
        # Last-resort guard: a background task has no caller to propagate to,
        # so log the full traceback rather than letting it vanish.
        _LOGGER.exception(
            "Error processing conversion XML in background for %s", filename
        )
@api_router.put("/hoteldata/conversions_import/{filename:path}") @api_router.put("/hoteldata/conversions_import/{filename:path}")
@limiter.limit(DEFAULT_RATE_LIMIT) @limiter.limit(DEFAULT_RATE_LIMIT)
async def handle_xml_upload( async def handle_xml_upload(
request: Request, request: Request,
background_tasks: BackgroundTasks,
filename: str, filename: str,
credentials_tupel: tuple = Depends(validate_basic_auth), credentials_tupel: tuple = Depends(validate_basic_auth),
db_session=Depends(get_async_session), db_session=Depends(get_async_session),
@@ -1332,6 +1379,8 @@ async def handle_xml_upload(
- Links conversions to customers and hashed_customers - Links conversions to customers and hashed_customers
- Stores daily sales revenue data - Stores daily sales revenue data
Returns immediately with 202 Accepted while processing continues in background.
Requires basic authentication and saves XML files to log directory. Requires basic authentication and saves XML files to log directory.
Supports gzip compression via Content-Encoding header. Supports gzip compression via Content-Encoding header.
@@ -1377,65 +1426,33 @@ async def handle_xml_upload(
status_code=400, detail="ERROR: Content does not appear to be XML" status_code=400, detail="ERROR: Content does not appear to be XML"
) )
# Create logs directory for XML conversions # Create logs directory for XML conversions (blocking, but fast)
logs_dir = Path("logs/conversions_import") logs_dir = Path("logs/conversions_import")
if not logs_dir.exists(): logs_dir.mkdir(parents=True, mode=0o755, exist_ok=True)
logs_dir.mkdir(parents=True, mode=0o755, exist_ok=True)
_LOGGER.info("Created directory: %s", logs_dir)
# Generate filename with timestamp and authenticated user # Generate filename with timestamp and authenticated user
username, _ = credentials_tupel username, _ = credentials_tupel
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
# Use the filename from the path, but add timestamp and username for uniqueness
base_filename = Path(filename).stem base_filename = Path(filename).stem
extension = Path(filename).suffix or ".xml" extension = Path(filename).suffix or ".xml"
log_filename = logs_dir / f"{base_filename}_{username}_{timestamp}{extension}" log_filename = logs_dir / f"{base_filename}_{username}_{timestamp}{extension}"
# Format and save XML content to file
try:
dom = xml.dom.minidom.parseString(xml_content)
pretty_xml = dom.toprettyxml(indent=" ")
# Remove extra blank lines that toprettyxml adds
pretty_xml = "\n".join([line for line in pretty_xml.split("\n") if line.strip()])
log_filename.write_text(pretty_xml, encoding="utf-8")
except Exception as e:
# If formatting fails, save the original content
_LOGGER.warning("Failed to format XML: %s. Saving unformatted.", str(e))
log_filename.write_text(xml_content, encoding="utf-8")
_LOGGER.info( _LOGGER.info(
"XML file saved to %s by user %s (original: %s)", "XML file queued for processing: %s by user %s (original: %s)",
log_filename, log_filename,
username, username,
filename, filename,
) )
# Process the conversion XML in the background # Schedule background processing using FastAPI's BackgroundTasks
async def process_in_background(): # This handles both file prettification/saving AND database processing
"""Process conversion XML asynchronously in the background.""" # This ensures the response is sent immediately
try: background_tasks.add_task(
# Use SessionMaker for concurrent processing of large XML files _process_conversion_xml_background,
# This allows multiple reservations to be processed xml_content,
# in parallel with independent sessions filename,
conversion_service = ConversionService(session_maker) session_maker,
processing_stats = await conversion_service.process_conversion_xml( log_filename,
xml_content
)
_LOGGER.info(
"Conversion processing complete for %s: %s",
filename,
processing_stats,
)
except Exception:
_LOGGER.exception(
"Error processing conversion XML in background for %s", filename
)
# Create background task and add done callback for error logging
task = asyncio.create_task(process_in_background())
task.add_done_callback(
lambda t: t.exception() if not t.cancelled() else None
) )
response_headers = { response_headers = {

View File

@@ -374,12 +374,22 @@ class ConversionService:
guest_last_name = None guest_last_name = None
guest_email = None guest_email = None
guest_country_code = None guest_country_code = None
guest_birth_date_str = None
guest_id = None
if guest_elem is not None: if guest_elem is not None:
guest_first_name = guest_elem.get("firstName") guest_first_name = guest_elem.get("firstName")
guest_last_name = guest_elem.get("lastName") guest_last_name = guest_elem.get("lastName")
guest_email = guest_elem.get("email") guest_email = guest_elem.get("email", None)
guest_country_code = guest_elem.get("countryCode") guest_country_code = guest_elem.get("countryCode", None)
guest_birth_date_str = guest_elem.get("dateOfBirth", None)
guest_id = guest_elem.get("id")
guest_birth_date = (
datetime.strptime(guest_birth_date_str, "%Y-%m-%d").date()
if guest_birth_date_str
else None
)
# Advertising/tracking data # Advertising/tracking data
advertising_medium = reservation_elem.get("advertisingMedium") advertising_medium = reservation_elem.get("advertisingMedium")
@@ -463,6 +473,7 @@ class ConversionService:
existing_conversion.guest_last_name = guest_last_name existing_conversion.guest_last_name = guest_last_name
existing_conversion.guest_email = guest_email existing_conversion.guest_email = guest_email
existing_conversion.guest_country_code = guest_country_code existing_conversion.guest_country_code = guest_country_code
existing_conversion.guest_birth_date = guest_birth_date
existing_conversion.advertising_medium = advertising_medium existing_conversion.advertising_medium = advertising_medium
existing_conversion.advertising_partner = advertising_partner existing_conversion.advertising_partner = advertising_partner
existing_conversion.advertising_campagne = advertising_campagne existing_conversion.advertising_campagne = advertising_campagne
@@ -495,6 +506,8 @@ class ConversionService:
guest_last_name=guest_last_name, guest_last_name=guest_last_name,
guest_email=guest_email, guest_email=guest_email,
guest_country_code=guest_country_code, guest_country_code=guest_country_code,
guest_birth_date=guest_birth_date,
guest_id=guest_id,
# Advertising data # Advertising data
advertising_medium=advertising_medium, advertising_medium=advertising_medium,
advertising_partner=advertising_partner, advertising_partner=advertising_partner,
@@ -622,6 +635,16 @@ class ConversionService:
# Check if room reservation already exists using batch-loaded data # Check if room reservation already exists using batch-loaded data
existing_room_reservation = existing_rooms.get(pms_hotel_reservation_id) existing_room_reservation = existing_rooms.get(pms_hotel_reservation_id)
if total_revenue > 0 and (
guest_first_name is None
and guest_last_name is None
and guest_email is None
):
_LOGGER.info(
"Guest info missing but total revenue > 0 for PMS ID %s",
pms_reservation_id,
)
if existing_room_reservation: if existing_room_reservation:
# Update existing room reservation with all fields # Update existing room reservation with all fields
existing_room_reservation.arrival_date = arrival_date existing_room_reservation.arrival_date = arrival_date
@@ -635,7 +658,7 @@ class ConversionService:
daily_sales_list if daily_sales_list else None daily_sales_list if daily_sales_list else None
) )
existing_room_reservation.total_revenue = ( existing_room_reservation.total_revenue = (
str(total_revenue) if total_revenue > 0 else None total_revenue if total_revenue > 0 else None
) )
existing_room_reservation.updated_at = datetime.now() existing_room_reservation.updated_at = datetime.now()
_LOGGER.debug( _LOGGER.debug(

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
"""Startup script for the Wix Form Handler API. """Startup script for the Alpine Bits Python Server API.
This script: This script:
1. Runs database migrations using Alembic 1. Runs database migrations using Alembic
@@ -10,13 +10,85 @@ is up to date. This approach works well with multiple workers since migrations
complete before any worker starts processing requests. complete before any worker starts processing requests.
""" """
import argparse
import sys import sys
import uvicorn import uvicorn
from alpine_bits_python.run_migrations import run_migrations from alpine_bits_python.run_migrations import run_migrations
def parse_args() -> argparse.Namespace:
"""Parse command line arguments for uvicorn configuration."""
parser = argparse.ArgumentParser(
description="Run Alpine Bits Python Server with database migrations"
)
parser.add_argument(
"--host",
type=str,
default="0.0.0.0",
help="Host to bind to (default: 0.0.0.0)",
)
parser.add_argument(
"--port",
type=int,
default=8080,
help="Port to bind to (default: 8080)",
)
parser.add_argument(
"--workers",
type=int,
default=1,
help="Number of worker processes (default: 1)",
)
parser.add_argument(
"--reload",
action="store_true",
default=False,
help="Enable auto-reload for development (default: False)",
)
parser.add_argument(
"--log-level",
type=str,
default="info",
choices=["critical", "error", "warning", "info", "debug", "trace"],
help="Log level (default: info)",
)
parser.add_argument(
"--access-log",
action="store_true",
default=False,
help="Enable access log (default: False)",
)
parser.add_argument(
"--forwarded-allow-ips",
type=str,
default="127.0.0.1",
help=(
"Comma-separated list of IPs to trust for proxy headers "
"(default: 127.0.0.1)"
),
)
parser.add_argument(
"--proxy-headers",
action="store_true",
default=False,
help="Enable proxy headers (X-Forwarded-* headers) (default: False)",
)
parser.add_argument(
"--no-server-header",
action="store_true",
default=False,
help="Disable Server header in responses (default: False)",
)
return parser.parse_args()
if __name__ == "__main__": if __name__ == "__main__":
# Parse command line arguments
args = parse_args()
# Run database migrations before starting the server # Run database migrations before starting the server
# This ensures the schema is up to date before any workers start # This ensures the schema is up to date before any workers start
print("Running database migrations...") print("Running database migrations...")
@@ -31,8 +103,13 @@ if __name__ == "__main__":
print("Starting API server...") print("Starting API server...")
uvicorn.run( uvicorn.run(
"alpine_bits_python.api:app", "alpine_bits_python.api:app",
host="0.0.0.0", host=args.host,
port=8080, port=args.port,
reload=True, # Enable auto-reload during development workers=args.workers,
log_level="info", reload=args.reload,
log_level=args.log_level,
access_log=args.access_log,
forwarded_allow_ips=args.forwarded_allow_ips,
proxy_headers=args.proxy_headers,
server_header=not args.no_server_header,
) )

View File

@@ -1,12 +1,30 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
"""Convenience launcher for the Wix Form Handler API.""" """Convenience launcher for the Alpine Bits Python Server API (Development Mode)."""
import os
import subprocess import subprocess
# Change to src directory # Run the API using uv with development settings
src_dir = os.path.join(os.path.dirname(__file__), "src/alpine_bits_python") # This includes:
# - Auto-reload enabled for code changes
# Run the API using uv # - Single worker for easier debugging
# - Port 8080 for development
if __name__ == "__main__": if __name__ == "__main__":
subprocess.run(["uv", "run", "python", os.path.join(src_dir, "run_api.py")], check=False) subprocess.run(
[
"uv",
"run",
"python",
"-m",
"alpine_bits_python.run_api",
"--host",
"0.0.0.0",
"--port",
"8080",
"--workers",
"1",
"--reload",
"--log-level",
"info",
],
check=False,
)