Fix generic_webhook crash when tracking_data is None; handle duplicate submissions (IntegrityError on unique_id) gracefully in handle_wix_form

This commit is contained in:
Jonas Linter
2025-10-27 09:02:50 +01:00
parent 90d79a71fb
commit e7b789fcac

View File

@@ -26,6 +26,7 @@ from fastapi.security import (
) )
from pydantic import BaseModel from pydantic import BaseModel
from slowapi.errors import RateLimitExceeded from slowapi.errors import RateLimitExceeded
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from alpine_bits_python.schemas import ReservationData from alpine_bits_python.schemas import ReservationData
@@ -73,10 +74,7 @@ TOKEN_LOG_LENGTH = 10
def get_advertising_account_ids( def get_advertising_account_ids(
config: dict[str, Any], config: dict[str, Any], hotel_code: str, fbclid: str | None, gclid: str | None
hotel_code: str,
fbclid: str | None,
gclid: str | None
) -> tuple[str | None, str | None]: ) -> tuple[str | None, str | None]:
"""Get advertising account IDs based on hotel config and click IDs. """Get advertising account IDs based on hotel config and click IDs.
@@ -830,13 +828,22 @@ async def process_generic_webhook_submission(
_LOGGER.warning("Invalid age value for child %d: %s", i, age_str) _LOGGER.warning("Invalid age value for child %d: %s", i, age_str)
# Extract tracking information # Extract tracking information
utm_source = tracking_data.get("utm_source") utm_source = None
utm_medium = tracking_data.get("utm_medium") utm_medium = None
utm_campaign = tracking_data.get("utm_campaign") utm_campaign = None
utm_term = tracking_data.get("utm_term") utm_term = None
utm_content = tracking_data.get("utm_content") utm_content = None
fbclid = tracking_data.get("fbclid") fbclid = None
gclid = tracking_data.get("gclid") gclid = None
if tracking_data:
utm_source = tracking_data.get("utm_source")
utm_medium = tracking_data.get("utm_medium")
utm_campaign = tracking_data.get("utm_campaign")
utm_term = tracking_data.get("utm_term")
utm_content = tracking_data.get("utm_content")
fbclid = tracking_data.get("fbclid")
gclid = tracking_data.get("gclid")
# Parse submission timestamp # Parse submission timestamp
submission_time = data.get("timestamp") submission_time = data.get("timestamp")
@@ -1009,11 +1016,21 @@ async def handle_wix_form(
""" """
try: try:
return await process_wix_form_submission(request, data, db_session) return await process_wix_form_submission(request, data, db_session)
except IntegrityError as e:
# Handle duplicate submissions gracefully - likely same form sent twice
# or race condition between workers
if "unique constraint" in str(e).lower() and "unique_id" in str(e).lower():
_LOGGER.warning(
"Duplicate submission detected (unique_id already exists). "
"Returning success to prevent retry. Error: %s",
str(e),
)
# Return success since the reservation already exists
return {"status": "success", "message": "Reservation already processed"}
# Re-raise if it's a different integrity error
raise
except Exception as e: except Exception as e:
_LOGGER.exception("Error in handle_wix_form: %s", e) _LOGGER.exception("Error in handle_wix_form")
# Log error data to file asynchronously
import traceback
log_entry = { log_entry = {
"timestamp": datetime.now().isoformat(), "timestamp": datetime.now().isoformat(),
@@ -1021,7 +1038,6 @@ async def handle_wix_form(
"headers": dict(request.headers), "headers": dict(request.headers),
"data": data, "data": data,
"error": str(e), "error": str(e),
"traceback": traceback.format_exc(),
} }
# Use asyncio to run file I/O in thread pool to avoid blocking # Use asyncio to run file I/O in thread pool to avoid blocking
@@ -1298,14 +1314,14 @@ async def handle_xml_upload(
<status>success</status> <status>success</status>
<message>Conversion data processed successfully</message> <message>Conversion data processed successfully</message>
<stats> <stats>
<totalReservations>{processing_stats['total_reservations']}</totalReservations> <totalReservations>{processing_stats["total_reservations"]}</totalReservations>
<deletedReservations>{processing_stats['deleted_reservations']}</deletedReservations> <deletedReservations>{processing_stats["deleted_reservations"]}</deletedReservations>
<totalDailySales>{processing_stats['total_daily_sales']}</totalDailySales> <totalDailySales>{processing_stats["total_daily_sales"]}</totalDailySales>
<matchedToReservation>{processing_stats['matched_to_reservation']}</matchedToReservation> <matchedToReservation>{processing_stats["matched_to_reservation"]}</matchedToReservation>
<matchedToCustomer>{processing_stats['matched_to_customer']}</matchedToCustomer> <matchedToCustomer>{processing_stats["matched_to_customer"]}</matchedToCustomer>
<matchedToHashedCustomer>{processing_stats['matched_to_hashed_customer']}</matchedToHashedCustomer> <matchedToHashedCustomer>{processing_stats["matched_to_hashed_customer"]}</matchedToHashedCustomer>
<unmatched>{processing_stats['unmatched']}</unmatched> <unmatched>{processing_stats["unmatched"]}</unmatched>
<errors>{processing_stats['errors']}</errors> <errors>{processing_stats["errors"]}</errors>
</stats> </stats>
</response>""" </response>"""