Converted csv_import to PUT request

Jonas Linter
2025-11-18 16:23:58 +01:00
parent 433026dd01
commit df84d8c898


@@ -1139,71 +1139,164 @@ async def handle_wix_form_test(
         raise HTTPException(status_code=500, detail="Error processing test data")
 
 
-@api_router.post("/admin/import-csv")
+async def _process_csv_import_background(
+    csv_content: str,
+    filename: str,
+    hotel_code: str | None,
+    session_maker: SessionMaker,
+    config: dict[str, Any],
+    log_filename: Path,
+):
+    """Background task to process CSV import.
+
+    This runs in a separate asyncio task after the HTTP response is sent.
+    Handles both file saving and database processing.
+    """
+    try:
+        # First, save the CSV file (in background)
+        await asyncio.to_thread(log_filename.write_text, csv_content, encoding="utf-8")
+        _LOGGER.debug("CSV file saved to %s", log_filename)
+
+        # Now process the CSV import
+        _LOGGER.info("Starting database processing of %s", filename)
+
+        # Create a new session for this background task
+        async with session_maker() as db_session:
+            importer = CSVImporter(db_session, config)
+            stats = await importer.import_csv_file(str(log_filename), hotel_code, dryrun=False)
+            _LOGGER.info(
+                "CSV import complete for %s: %s", filename, stats
+            )
+    except Exception:
+        _LOGGER.exception(
+            "Error processing CSV import in background for %s", filename
+        )
+
+
+@api_router.put("/admin/import-csv/{filename:path}")
 @limiter.limit(BURST_RATE_LIMIT)
 async def import_csv_endpoint(
     request: Request,
-    csv_file_path: str,
+    background_tasks: BackgroundTasks,
+    filename: str,
     hotel_code: str | None = None,
     credentials: tuple = Depends(validate_basic_auth),
     db_session=Depends(get_async_session),
+    session_maker: SessionMaker = Depends(get_session_maker),
 ):
-    """Import reservations from a CSV file (landing_page_form.csv format).
+    """Import reservations from CSV data sent via PUT request.
 
     This endpoint allows importing historical form data into the system.
     It creates customers and reservations, avoiding duplicates based on:
     - Name, email, reservation dates
     - fbclid/gclid tracking IDs
 
-    Requires basic authentication.
+    Returns immediately with 202 Accepted while processing continues in background.
+    Requires basic authentication and saves CSV files to log directory.
+    Supports gzip compression via Content-Encoding header.
 
     Args:
-        csv_file_path: Path to CSV file (relative to app root)
+        filename: Name for the CSV file (used for logging)
         hotel_code: Optional hotel code to override CSV values
         credentials: Basic auth credentials
 
-    Returns:
-        Import statistics including created/skipped counts and any errors
+    Example: PUT /api/admin/import-csv/reservations.csv
     """
     try:
-        # Validate file path to prevent path traversal
-        if ".." in csv_file_path or csv_file_path.startswith("/"):
-            raise HTTPException(status_code=400, detail="Invalid file path")
+        # Validate filename to prevent path traversal
+        if ".." in filename or filename.startswith("/"):
+            raise HTTPException(status_code=400, detail="ERROR: Invalid filename")
 
-        # Check if file exists
-        csv_path = Path(csv_file_path)
-        if not csv_path.exists():
-            # Try relative to app root
-            csv_path = Path() / csv_file_path
-            if not csv_path.exists():
-                raise HTTPException(
-                    status_code=404, detail=f"CSV file not found: {csv_file_path}"
-                )
+        # Get the raw body content
+        body = await request.body()
+        if not body:
+            raise HTTPException(
+                status_code=400, detail="ERROR: No CSV content provided"
+            )
+
+        # Check if content is gzip compressed
+        content_encoding = request.headers.get("content-encoding", "").lower()
+        is_gzipped = content_encoding == "gzip"
+
+        # Decompress if gzipped
+        if is_gzipped:
+            try:
+                body = gzip.decompress(body)
+            except Exception as e:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"ERROR: Failed to decompress gzip content: {e}",
+                ) from e
+
+        # Try to decode as UTF-8
+        try:
+            csv_content = body.decode("utf-8")
+        except UnicodeDecodeError:
+            # If UTF-8 fails, try with latin-1 as fallback
+            csv_content = body.decode("latin-1")
+
+        # Basic validation that it looks like CSV
+        if not csv_content.strip():
+            raise HTTPException(
+                status_code=400, detail="ERROR: CSV content is empty"
+            )
+
+        # Create logs directory for CSV imports (blocking, but fast)
+        logs_dir = Path("logs/csv_imports")
+        logs_dir.mkdir(parents=True, mode=0o755, exist_ok=True)
+
+        # Generate filename with timestamp and authenticated user
+        username, _ = credentials
+        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        base_filename = Path(filename).stem
+        extension = Path(filename).suffix or ".csv"
+        log_filename = logs_dir / f"{base_filename}_{username}_{timestamp}{extension}"
 
         _LOGGER.info(
-            "Starting CSV import from %s (user: %s)", csv_file_path, credentials[0]
+            "CSV file queued for processing: %s by user %s (original: %s)",
+            log_filename,
+            username,
+            filename,
         )
 
-        # Create importer and import
-        importer = CSVImporter(db_session, request.app.state.config)
-        stats = await importer.import_csv_file(str(csv_path), hotel_code, dryrun=False)
+        # Schedule background processing using FastAPI's BackgroundTasks
+        # This handles both file saving AND database processing
+        # This ensures the response is sent immediately
+        background_tasks.add_task(
+            _process_csv_import_background,
+            csv_content,
+            filename,
+            hotel_code,
+            session_maker,
+            request.app.state.config,
+            log_filename,
+        )
 
-        _LOGGER.info("CSV import completed: %s", stats)
+        response_headers = {
+            "Content-Type": "application/json; charset=utf-8",
+        }
 
-        return {
-            "status": "success",
-            "message": "CSV import completed",
-            "stats": stats,
-            "timestamp": datetime.now().isoformat(),
-        }
-    except FileNotFoundError as e:
-        _LOGGER.error("CSV file not found: %s", e)
-        raise HTTPException(status_code=404, detail=str(e))
-    except Exception as e:
-        _LOGGER.exception("Error during CSV import")
-        raise HTTPException(status_code=500, detail=f"Error processing CSV: {e!s}")
+        # Return immediate acknowledgment
+        return Response(
+            content=json.dumps({
+                "status": "accepted",
+                "message": "CSV file received and queued for processing",
+                "filename": filename,
+                "timestamp": datetime.now().isoformat(),
+            }),
+            headers=response_headers,
+            status_code=202,
+        )
+    except HTTPException:
+        raise
+    except Exception:
+        _LOGGER.exception("Error in import_csv_endpoint")
+        raise HTTPException(status_code=500, detail="Error processing CSV upload")
 
 
 @api_router.post("/webhook/generic")