2 Commits

Author         SHA1         Message                                  Date
Jonas Linter   a8c441ea6f   Stats collector for email monitoring     2025-10-15 09:09:07 +02:00
Jonas Linter   5a0ae44a45   fixed test warning                       2025-10-15 08:55:51 +02:00
5 changed files with 174 additions and 28 deletions

View File

@@ -14059,3 +14059,17 @@ IndexError: list index out of range
2025-10-10 10:59:53 - alpine_bits_python.api - INFO - Hotel 39040_001 has no push_endpoint configured
2025-10-10 10:59:53 - alpine_bits_python.api - INFO - Database tables checked/created at startup.
2025-10-10 10:59:53 - httpx - INFO - HTTP Request: PUT http://testserver/api/hoteldata/conversions_import/test_reservation.xml "HTTP/1.1 401 Unauthorized"
2025-10-15 08:49:50 - root - INFO - Logging to file: alpinebits.log
2025-10-15 08:49:50 - root - INFO - Logging configured at INFO level
2025-10-15 08:49:52 - alpine_bits_python.email_service - INFO - Email service initialized: smtp.gmail.com:587
2025-10-15 08:49:52 - root - INFO - Logging to file: alpinebits.log
2025-10-15 08:49:52 - root - INFO - Logging configured at INFO level
2025-10-15 08:49:54 - alpine_bits_python.email_service - INFO - Email service initialized: smtp.gmail.com:587
2025-10-15 08:52:37 - root - INFO - Logging to file: alpinebits.log
2025-10-15 08:52:37 - root - INFO - Logging configured at INFO level
2025-10-15 08:52:54 - root - INFO - Logging to file: alpinebits.log
2025-10-15 08:52:54 - root - INFO - Logging configured at INFO level
2025-10-15 08:52:56 - alpine_bits_python.email_service - INFO - Email service initialized: smtp.titan.email:465
2025-10-15 08:52:56 - root - INFO - Logging to file: alpinebits.log
2025-10-15 08:52:56 - root - INFO - Logging configured at INFO level
2025-10-15 08:52:58 - alpine_bits_python.email_service - INFO - Email service initialized: smtp.titan.email:465

View File

@@ -44,15 +44,15 @@ alpine_bits_auth:
 email:
   # SMTP server configuration
   smtp:
-    host: "smtp.gmail.com" # Your SMTP server
-    port: 587 # Usually 587 for TLS, 465 for SSL
-    username: !secret EMAIL_USERNAME # SMTP username
+    host: "smtp.titan.email" # Your SMTP server
+    port: 465 # Usually 587 for TLS, 465 for SSL
+    username: info@99tales.net # SMTP username
     password: !secret EMAIL_PASSWORD # SMTP password
-    use_tls: true # Use STARTTLS
-    use_ssl: false # Use SSL/TLS from start
+    use_tls: false # Use STARTTLS
+    use_ssl: true # Use SSL/TLS from start
   # Email addresses
-  from_address: "noreply@99tales.com" # Sender address
+  from_address: "info@99tales.net" # Sender address
   from_name: "AlpineBits Monitor" # Sender display name
   # Monitoring and alerting
@@ -61,8 +61,8 @@ email:
   daily_report:
     enabled: false # Set to true to enable daily reports
     recipients:
-      - "admin@99tales.com"
-      - "dev@99tales.com"
+      - "jonas@vaius.ai"
+      #- "dev@99tales.com"
     send_time: "08:00" # Time to send daily report (24h format, local time)
     include_stats: true # Include reservation/customer stats
     include_errors: true # Include error summary
@@ -71,8 +71,8 @@ email:
   error_alerts:
     enabled: false # Set to true to enable error alerts
     recipients:
-      - "alerts@99tales.com"
-      - "oncall@99tales.com"
+      - "jonas@vaius.ai"
+      #- "oncall@99tales.com"
     # Alert is sent immediately if threshold is reached
     error_threshold: 5 # Send immediate alert after N errors
     # Otherwise, alert is sent after buffer time expires
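The change above moves from Gmail on port 587 (STARTTLS, `use_tls: true`) to Titan on port 465 (implicit SSL, `use_ssl: true`). As a rough illustration of what these two flags usually mean for an SMTP client, here is a minimal sketch with an assumed `send_mail` helper and config shape; it is not the actual `alpine_bits_python.email_service` implementation:

```python
import smtplib
from email.message import EmailMessage


def send_mail(smtp_cfg: dict, msg: EmailMessage) -> None:
    """Open the kind of SMTP connection implied by use_ssl / use_tls and send one message."""
    host, port = smtp_cfg["host"], smtp_cfg["port"]
    if smtp_cfg.get("use_ssl"):
        # Implicit TLS from the first byte, the usual mode for port 465
        server = smtplib.SMTP_SSL(host, port, timeout=30)
    else:
        # Plain connection, optionally upgraded via STARTTLS, the usual mode for port 587
        server = smtplib.SMTP(host, port, timeout=30)
        if smtp_cfg.get("use_tls"):
            server.starttls()
    try:
        server.login(smtp_cfg["username"], smtp_cfg["password"])
        server.send_message(msg)
    finally:
        server.quit()
```

With the new values only the SSL branch is taken; enabling both flags at once would attempt STARTTLS on an already-encrypted connection, which is why the two options are flipped together in the diff.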

View File

@@ -20,11 +20,11 @@ email:
 ## 2. Set Environment Variables
-Create a `.env` file in the project root:
+In the secrets.yaml file add the secrets
-```bash
-EMAIL_USERNAME=your-email@gmail.com
-EMAIL_PASSWORD=your-app-password
+```yaml
+EMAIL_USERNAME: "your_email_username"
+EMAIL_PASSWORD: "your_email_password"
 ```
 > **Note:** For Gmail, use an [App Password](https://support.google.com/accounts/answer/185833), not your regular password.
@@ -46,6 +46,7 @@ email:
```
**How it works:**
- Sends immediate alert after 5 errors
- Otherwise sends after 15 minutes
- Waits 15 minutes between alerts (cooldown)
@@ -75,6 +76,7 @@ uv run python examples/test_email_monitoring.py
```
This will:
- ✅ Send a test email
- ✅ Trigger an error alert
- ✅ Send a test daily report
@@ -124,12 +126,14 @@ Errors (3):
### No emails received?
1. Check your SMTP credentials:
```bash
echo $EMAIL_USERNAME
echo $EMAIL_PASSWORD
```
2. Check application logs for errors:
```bash
tail -f alpinebits.log | grep -i email
```
@@ -167,6 +171,7 @@ Errors (3):
## Support
For issues or questions:
- Check the [documentation](./EMAIL_MONITORING.md)
- Review [test examples](../examples/test_email_monitoring.py)
- Open an issue on GitHub
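The "How it works" bullets above (immediate alert at 5 errors, otherwise after the 15-minute buffer, 15-minute cooldown between alerts) boil down to a few comparisons. The sketch below is a simplified synchronous model of those rules with assumed names and defaults; the real `EmailAlertHandler` in `email_monitoring.py` applies them asynchronously from a logging handler and schedules a delayed flush so the buffered alert goes out even if no further errors arrive, whereas this model only evaluates when a new error is recorded:

```python
import time
from collections import deque


class AlertBuffer:
    """Toy model of the threshold / buffer / cooldown rules described above."""

    def __init__(
        self,
        error_threshold: int = 5,
        buffer_minutes: int = 15,
        cooldown_minutes: int = 15,
    ) -> None:
        self.error_threshold = error_threshold
        self.buffer_seconds = buffer_minutes * 60
        self.cooldown_seconds = cooldown_minutes * 60
        self.errors: deque[str] = deque()
        self.first_error_at: float | None = None
        self.last_alert_at = float("-inf")  # no cooldown before the first alert

    def add_error(self, message: str) -> bool:
        """Record an error; return True when an alert should be sent right now."""
        now = time.monotonic()
        self.errors.append(message)
        if self.first_error_at is None:
            self.first_error_at = now

        in_cooldown = (now - self.last_alert_at) < self.cooldown_seconds
        hit_threshold = len(self.errors) >= self.error_threshold
        buffer_expired = (now - self.first_error_at) >= self.buffer_seconds

        if (hit_threshold or buffer_expired) and not in_cooldown:
            self.last_alert_at = now
            self.errors.clear()
            self.first_error_at = None
            return True
        return False
```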

View File

@@ -32,6 +32,7 @@ from .customer_service import CustomerService
 from .db import Base, get_database_url
 from .db import Customer as DBCustomer
 from .db import Reservation as DBReservation
+from .email_monitoring import ReservationStatsCollector
 from .email_service import create_email_service
 from .logging_config import get_logger, setup_logging
 from .rate_limit import (
@@ -245,8 +246,17 @@ async def lifespan(app: FastAPI):
     else:
         _LOGGER.info("All existing customers already have hashed data")
-    # Start daily report scheduler if enabled
+    # Initialize and hook up stats collector for daily reports
     if report_scheduler:
+        stats_collector = ReservationStatsCollector(
+            async_sessionmaker=AsyncSessionLocal,
+            config=config,
+        )
+        # Hook up the stats collector to the report scheduler
+        report_scheduler.set_stats_collector(stats_collector.collect_stats)
+        _LOGGER.info("Stats collector initialized and hooked up to report scheduler")
+        # Start daily report scheduler
         report_scheduler.start()
         _LOGGER.info("Daily report scheduler started")

View File

@@ -7,10 +7,14 @@ email alerts based on configurable thresholds and time windows.
 import asyncio
 import logging
 import threading
-from collections import deque
+from collections import defaultdict, deque
 from datetime import datetime, timedelta
 from typing import Any
+from sqlalchemy import func, select
+from sqlalchemy.ext.asyncio import async_sessionmaker
+from .db import Reservation
 from .email_service import EmailService
 from .logging_config import get_logger
@@ -174,13 +178,12 @@ class EmailAlertHandler(logging.Handler):
self._flush_buffer(immediate=True),
self.loop,
)
else:
# Schedule delayed flush if not already scheduled
if not self._flush_task or self._flush_task.done():
self._flush_task = asyncio.run_coroutine_threadsafe(
self._schedule_delayed_flush(),
self.loop,
)
# Schedule delayed flush if not already scheduled
elif not self._flush_task or self._flush_task.done():
self._flush_task = asyncio.run_coroutine_threadsafe(
self._schedule_delayed_flush(),
self.loop,
)
except Exception:
# Never let the handler crash - just log and continue
@@ -237,7 +240,9 @@ class EmailAlertHandler(logging.Handler):
emoji = "⚠️"
reason = f"({self.buffer_minutes} minute buffer)"
subject = f"{emoji} AlpineBits Error {alert_type}: {error_count} errors {reason}"
subject = (
f"{emoji} AlpineBits Error {alert_type}: {error_count} errors {reason}"
)
# Build plain text body
body = f"Error Alert - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n"
@@ -290,10 +295,18 @@ class EmailAlertHandler(logging.Handler):
# Flush any remaining errors immediately
if self.error_buffer and self.loop:
try:
asyncio.run_coroutine_threadsafe(
self._flush_buffer(immediate=False),
self.loop,
).result(timeout=5)
# Check if the loop is still running
if not self.loop.is_closed():
future = asyncio.run_coroutine_threadsafe(
self._flush_buffer(immediate=False),
self.loop,
)
future.result(timeout=5)
else:
_LOGGER.warning(
"Event loop closed, cannot flush %d remaining errors",
len(self.error_buffer),
)
except Exception:
_LOGGER.exception("Error flushing buffer on close")
@@ -440,3 +453,107 @@ class DailyReportScheduler:
"""
self._stats_collector = collector
class ReservationStatsCollector:
"""Collects reservation statistics per hotel for daily reports.
This collector queries the database for reservations created since the last
report and aggregates them by hotel. It includes hotel_code and hotel_name
from the configuration.
"""
def __init__(
self,
async_sessionmaker: async_sessionmaker,
config: dict[str, Any],
):
"""Initialize the stats collector.
Args:
async_sessionmaker: SQLAlchemy async session maker
config: Application configuration containing hotel information
"""
self.async_sessionmaker = async_sessionmaker
self.config = config
self._last_report_time = datetime.now()
# Build hotel mapping from config
self._hotel_map = {}
for hotel in config.get("alpine_bits_auth", []):
hotel_id = hotel.get("hotel_id")
hotel_name = hotel.get("hotel_name")
if hotel_id:
self._hotel_map[hotel_id] = hotel_name or "Unknown Hotel"
_LOGGER.info(
"ReservationStatsCollector initialized with %d hotels",
len(self._hotel_map),
)
async def collect_stats(self) -> dict[str, Any]:
"""Collect reservation statistics for the reporting period.
Returns:
Dictionary with statistics including reservations per hotel
"""
now = datetime.now()
period_start = self._last_report_time
period_end = now
_LOGGER.info(
"Collecting reservation stats from %s to %s",
period_start.strftime("%Y-%m-%d %H:%M:%S"),
period_end.strftime("%Y-%m-%d %H:%M:%S"),
)
async with self.async_sessionmaker() as session:
# Query reservations created in the reporting period
result = await session.execute(
select(Reservation.hotel_code, func.count(Reservation.id))
.where(Reservation.created_at >= period_start)
.where(Reservation.created_at < period_end)
.group_by(Reservation.hotel_code)
)
hotel_counts = dict(result.all())
# Build stats with hotel names from config
hotels_stats = []
total_reservations = 0
for hotel_code, count in hotel_counts.items():
hotel_name = self._hotel_map.get(hotel_code, "Unknown Hotel")
hotels_stats.append(
{
"hotel_code": hotel_code,
"hotel_name": hotel_name,
"reservations": count,
}
)
total_reservations += count
# Sort by reservation count descending
hotels_stats.sort(key=lambda x: x["reservations"], reverse=True)
# Update last report time
self._last_report_time = now
stats = {
"reporting_period": {
"start": period_start.strftime("%Y-%m-%d %H:%M:%S"),
"end": period_end.strftime("%Y-%m-%d %H:%M:%S"),
},
"total_reservations": total_reservations,
"hotels": hotels_stats,
}
_LOGGER.info(
"Collected stats: %d total reservations across %d hotels",
total_reservations,
len(hotels_stats),
)
return stats
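A quick way to exercise the new collector outside the FastAPI lifespan, for example from a one-off script or test. The database URL, driver, and config literal below are placeholders (the real application builds its URL via `get_database_url()`); only the import path and the constructor signature come from the diff above:

```python
import asyncio

from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from alpine_bits_python.email_monitoring import ReservationStatsCollector

# Placeholder config mirroring the alpine_bits_auth entries the collector reads
config = {
    "alpine_bits_auth": [
        {"hotel_id": "39040_001", "hotel_name": "Example Hotel"},
    ],
}


async def main() -> None:
    # Placeholder URL and driver, for illustration only
    engine = create_async_engine("sqlite+aiosqlite:///./alpinebits.db")
    session_factory = async_sessionmaker(engine, expire_on_commit=False)

    collector = ReservationStatsCollector(
        async_sessionmaker=session_factory,
        config=config,
    )
    stats = await collector.collect_stats()

    print(f"{stats['total_reservations']} reservations since the last report")
    for hotel in stats["hotels"]:
        print(hotel["hotel_code"], hotel["hotel_name"], hotel["reservations"])


if __name__ == "__main__":
    asyncio.run(main())
```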