Converted csv_import to PUT request

Jonas Linter
2025-11-18 16:23:58 +01:00
parent 3f149fe984
commit 2c61d13d7a


@@ -1139,71 +1139,164 @@ async def handle_wix_form_test(
         raise HTTPException(status_code=500, detail="Error processing test data")
 
 
-@api_router.post("/admin/import-csv")
+async def _process_csv_import_background(
+    csv_content: str,
+    filename: str,
+    hotel_code: str | None,
+    session_maker: SessionMaker,
+    config: dict[str, Any],
+    log_filename: Path,
+):
+    """Background task to process CSV import.
+
+    This runs in a separate asyncio task after the HTTP response is sent.
+    Handles both file saving and database processing.
+    """
+    try:
+        # First, save the CSV file (in background)
+        await asyncio.to_thread(log_filename.write_text, csv_content, encoding="utf-8")
+        _LOGGER.debug("CSV file saved to %s", log_filename)
+
+        # Now process the CSV import
+        _LOGGER.info("Starting database processing of %s", filename)
+
+        # Create a new session for this background task
+        async with session_maker() as db_session:
+            importer = CSVImporter(db_session, config)
+            stats = await importer.import_csv_file(str(log_filename), hotel_code, dryrun=False)
+        _LOGGER.info(
+            "CSV import complete for %s: %s", filename, stats
+        )
+    except Exception:
+        _LOGGER.exception(
+            "Error processing CSV import in background for %s", filename
+        )
+
+
+@api_router.put("/admin/import-csv/{filename:path}")
 @limiter.limit(BURST_RATE_LIMIT)
 async def import_csv_endpoint(
     request: Request,
-    csv_file_path: str,
+    background_tasks: BackgroundTasks,
+    filename: str,
     hotel_code: str | None = None,
     credentials: tuple = Depends(validate_basic_auth),
-    db_session=Depends(get_async_session),
+    session_maker: SessionMaker = Depends(get_session_maker),
 ):
-    """Import reservations from a CSV file (landing_page_form.csv format).
+    """Import reservations from CSV data sent via PUT request.
 
     This endpoint allows importing historical form data into the system.
     It creates customers and reservations, avoiding duplicates based on:
     - Name, email, reservation dates
     - fbclid/gclid tracking IDs
 
-    Requires basic authentication.
+    Returns immediately with 202 Accepted while processing continues in background.
+    Requires basic authentication and saves CSV files to log directory.
+    Supports gzip compression via Content-Encoding header.
 
     Args:
-        csv_file_path: Path to CSV file (relative to app root)
+        filename: Name for the CSV file (used for logging)
         hotel_code: Optional hotel code to override CSV values
         credentials: Basic auth credentials
 
     Returns:
         Import statistics including created/skipped counts and any errors
+
+    Example: PUT /api/admin/import-csv/reservations.csv
     """
     try:
-        # Validate file path to prevent path traversal
-        if ".." in csv_file_path or csv_file_path.startswith("/"):
-            raise HTTPException(status_code=400, detail="Invalid file path")
+        # Validate filename to prevent path traversal
+        if ".." in filename or filename.startswith("/"):
+            raise HTTPException(status_code=400, detail="ERROR: Invalid filename")
 
-        # Check if file exists
-        csv_path = Path(csv_file_path)
-        if not csv_path.exists():
-            # Try relative to app root
-            csv_path = Path() / csv_file_path
-            if not csv_path.exists():
-                raise HTTPException(
-                    status_code=404, detail=f"CSV file not found: {csv_file_path}"
-                )
+        # Get the raw body content
+        body = await request.body()
+        if not body:
+            raise HTTPException(
+                status_code=400, detail="ERROR: No CSV content provided"
+            )
 
+        # Check if content is gzip compressed
+        content_encoding = request.headers.get("content-encoding", "").lower()
+        is_gzipped = content_encoding == "gzip"
+
+        # Decompress if gzipped
+        if is_gzipped:
+            try:
+                body = gzip.decompress(body)
+            except Exception as e:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"ERROR: Failed to decompress gzip content: {e}",
+                ) from e
+
+        # Try to decode as UTF-8
+        try:
+            csv_content = body.decode("utf-8")
+        except UnicodeDecodeError:
+            # If UTF-8 fails, try with latin-1 as fallback
+            csv_content = body.decode("latin-1")
+
+        # Basic validation that it looks like CSV
+        if not csv_content.strip():
+            raise HTTPException(
+                status_code=400, detail="ERROR: CSV content is empty"
+            )
+
+        # Create logs directory for CSV imports (blocking, but fast)
+        logs_dir = Path("logs/csv_imports")
+        logs_dir.mkdir(parents=True, mode=0o755, exist_ok=True)
+
+        # Generate filename with timestamp and authenticated user
+        username, _ = credentials
+        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        base_filename = Path(filename).stem
+        extension = Path(filename).suffix or ".csv"
+        log_filename = logs_dir / f"{base_filename}_{username}_{timestamp}{extension}"
+
         _LOGGER.info(
-            "Starting CSV import from %s (user: %s)", csv_file_path, credentials[0]
+            "CSV file queued for processing: %s by user %s (original: %s)",
+            log_filename,
+            username,
+            filename,
         )
 
-        # Create importer and import
-        importer = CSVImporter(db_session, request.app.state.config)
-        stats = await importer.import_csv_file(str(csv_path), hotel_code, dryrun=False)
+        # Schedule background processing using FastAPI's BackgroundTasks
+        # This handles both file saving AND database processing
+        # This ensures the response is sent immediately
+        background_tasks.add_task(
+            _process_csv_import_background,
+            csv_content,
+            filename,
+            hotel_code,
+            session_maker,
+            request.app.state.config,
+            log_filename,
+        )
 
-        _LOGGER.info("CSV import completed: %s", stats)
-        return {
-            "status": "success",
-            "message": "CSV import completed",
-            "stats": stats,
-            "timestamp": datetime.now().isoformat(),
+        response_headers = {
+            "Content-Type": "application/json; charset=utf-8",
         }
-    except FileNotFoundError as e:
-        _LOGGER.error("CSV file not found: %s", e)
-        raise HTTPException(status_code=404, detail=str(e))
-    except Exception as e:
-        _LOGGER.exception("Error during CSV import")
-        raise HTTPException(status_code=500, detail=f"Error processing CSV: {e!s}")
+
+        # Return immediate acknowledgment
+        return Response(
+            content=json.dumps({
+                "status": "accepted",
+                "message": "CSV file received and queued for processing",
+                "filename": filename,
+                "timestamp": datetime.now().isoformat(),
+            }),
+            headers=response_headers,
+            status_code=202,
+        )
+    except HTTPException:
+        raise
+    except Exception:
+        _LOGGER.exception("Error in import_csv_endpoint")
+        raise HTTPException(status_code=500, detail="Error processing CSV upload")
 
 
 @api_router.post("/webhook/generic")