merge_db_fixes_to_main #16
@@ -0,0 +1,51 @@
|
|||||||
|
"""remove_composite_fk_from_conversions
|
||||||
|
|
||||||
|
Revision ID: 694d52a883c3
|
||||||
|
Revises: b50c0f45030a
|
||||||
|
Create Date: 2025-12-03 09:50:18.506030
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '694d52a883c3'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = 'b50c0f45030a'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_constraint(op.f('conversions_hotel_id_guest_id_fkey'), 'conversions', type_='foreignkey')
|
||||||
|
|
||||||
|
# Rename hotel_code to hotel_id (preserving data) and add FK to hotels
|
||||||
|
op.add_column('reservations', sa.Column('hotel_id', sa.String(), nullable=True))
|
||||||
|
op.execute('UPDATE reservations SET hotel_id = hotel_code')
|
||||||
|
op.drop_column('reservations', 'hotel_code')
|
||||||
|
|
||||||
|
# Add FK constraint without immediate validation (NOT VALID)
|
||||||
|
# This allows existing rows with non-existent hotel_ids to remain
|
||||||
|
# Future inserts/updates will still be validated
|
||||||
|
op.execute(
|
||||||
|
'ALTER TABLE reservations ADD CONSTRAINT fk_reservations_hotel_id_hotels '
|
||||||
|
'FOREIGN KEY (hotel_id) REFERENCES hotels (hotel_id) ON DELETE CASCADE NOT VALID'
|
||||||
|
)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
# Drop FK and rename hotel_id back to hotel_code (preserving data)
|
||||||
|
op.drop_constraint(op.f('fk_reservations_hotel_id_hotels'), 'reservations', type_='foreignkey')
|
||||||
|
op.add_column('reservations', sa.Column('hotel_code', sa.VARCHAR(), autoincrement=False, nullable=True))
|
||||||
|
op.execute('UPDATE reservations SET hotel_code = hotel_id')
|
||||||
|
op.drop_column('reservations', 'hotel_id')
|
||||||
|
|
||||||
|
op.create_foreign_key(op.f('conversions_hotel_id_guest_id_fkey'), 'conversions', 'conversion_guests', ['hotel_id', 'guest_id'], ['hotel_id', 'guest_id'], ondelete='SET NULL')
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,104 @@
|
|||||||
|
"""merge_hashed_customers_into_customers
|
||||||
|
|
||||||
|
Revision ID: 0fbeb40dbb2c
|
||||||
|
Revises: 694d52a883c3
|
||||||
|
Create Date: 2025-12-03 10:44:32.243220
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '0fbeb40dbb2c'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = '694d52a883c3'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
# Add hashed columns to customers table
|
||||||
|
op.add_column('customers', sa.Column('hashed_email', sa.String(length=64), nullable=True))
|
||||||
|
op.add_column('customers', sa.Column('hashed_phone', sa.String(length=64), nullable=True))
|
||||||
|
op.add_column('customers', sa.Column('hashed_given_name', sa.String(length=64), nullable=True))
|
||||||
|
op.add_column('customers', sa.Column('hashed_surname', sa.String(length=64), nullable=True))
|
||||||
|
op.add_column('customers', sa.Column('hashed_city', sa.String(length=64), nullable=True))
|
||||||
|
op.add_column('customers', sa.Column('hashed_postal_code', sa.String(length=64), nullable=True))
|
||||||
|
op.add_column('customers', sa.Column('hashed_country_code', sa.String(length=64), nullable=True))
|
||||||
|
op.add_column('customers', sa.Column('hashed_gender', sa.String(length=64), nullable=True))
|
||||||
|
op.add_column('customers', sa.Column('hashed_birth_date', sa.String(length=64), nullable=True))
|
||||||
|
op.add_column('customers', sa.Column('created_at', sa.DateTime(timezone=True), nullable=True))
|
||||||
|
|
||||||
|
# Migrate data from hashed_customers to customers
|
||||||
|
op.execute('''
|
||||||
|
UPDATE customers c
|
||||||
|
SET
|
||||||
|
hashed_email = hc.hashed_email,
|
||||||
|
hashed_phone = hc.hashed_phone,
|
||||||
|
hashed_given_name = hc.hashed_given_name,
|
||||||
|
hashed_surname = hc.hashed_surname,
|
||||||
|
hashed_city = hc.hashed_city,
|
||||||
|
hashed_postal_code = hc.hashed_postal_code,
|
||||||
|
hashed_country_code = hc.hashed_country_code,
|
||||||
|
hashed_gender = hc.hashed_gender,
|
||||||
|
hashed_birth_date = hc.hashed_birth_date,
|
||||||
|
created_at = COALESCE(c.created_at, hc.created_at)
|
||||||
|
FROM hashed_customers hc
|
||||||
|
WHERE c.id = hc.customer_id
|
||||||
|
''')
|
||||||
|
|
||||||
|
# Update reservations to point to customers instead of hashed_customers
|
||||||
|
# First, update reservations.customer_id from reservations.hashed_customer_id
|
||||||
|
op.execute('''
|
||||||
|
UPDATE reservations r
|
||||||
|
SET customer_id = hc.customer_id
|
||||||
|
FROM hashed_customers hc
|
||||||
|
WHERE r.hashed_customer_id = hc.id
|
||||||
|
AND r.customer_id IS NULL
|
||||||
|
''')
|
||||||
|
|
||||||
|
# Update conversions to point to customers instead of hashed_customers
|
||||||
|
op.execute('''
|
||||||
|
UPDATE conversions c
|
||||||
|
SET customer_id = hc.customer_id
|
||||||
|
FROM hashed_customers hc
|
||||||
|
WHERE c.hashed_customer_id = hc.id
|
||||||
|
AND c.customer_id IS NULL
|
||||||
|
''')
|
||||||
|
|
||||||
|
# Update conversion_guests to point to customers instead of hashed_customers
|
||||||
|
op.execute('''
|
||||||
|
UPDATE conversion_guests cg
|
||||||
|
SET hashed_customer_id = NULL
|
||||||
|
WHERE hashed_customer_id IS NOT NULL
|
||||||
|
''')
|
||||||
|
|
||||||
|
# Now safe to drop the FK and column from reservations
|
||||||
|
op.drop_constraint(op.f('reservations_hashed_customer_id_fkey'), 'reservations', type_='foreignkey')
|
||||||
|
op.drop_column('reservations', 'hashed_customer_id')
|
||||||
|
|
||||||
|
# Note: We're keeping the hashed_customers table for now since conversion_service.py still uses it
|
||||||
|
# It can be dropped in a future migration after updating the application code
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column('reservations', sa.Column('hashed_customer_id', sa.INTEGER(), autoincrement=False, nullable=True))
|
||||||
|
op.create_foreign_key(op.f('reservations_hashed_customer_id_fkey'), 'reservations', 'hashed_customers', ['hashed_customer_id'], ['id'], ondelete='CASCADE')
|
||||||
|
op.drop_column('customers', 'created_at')
|
||||||
|
op.drop_column('customers', 'hashed_birth_date')
|
||||||
|
op.drop_column('customers', 'hashed_gender')
|
||||||
|
op.drop_column('customers', 'hashed_country_code')
|
||||||
|
op.drop_column('customers', 'hashed_postal_code')
|
||||||
|
op.drop_column('customers', 'hashed_city')
|
||||||
|
op.drop_column('customers', 'hashed_surname')
|
||||||
|
op.drop_column('customers', 'hashed_given_name')
|
||||||
|
op.drop_column('customers', 'hashed_phone')
|
||||||
|
op.drop_column('customers', 'hashed_email')
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,63 @@
|
|||||||
|
"""removed hashed_customer completly
|
||||||
|
|
||||||
|
Revision ID: 3147e421bc47
|
||||||
|
Revises: 0fbeb40dbb2c
|
||||||
|
Create Date: 2025-12-03 11:42:05.722690
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '3147e421bc47'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = '0fbeb40dbb2c'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
|
||||||
|
op.drop_index(op.f('ix_conversion_guests_hashed_customer_id'), table_name='conversion_guests')
|
||||||
|
op.drop_constraint(op.f('fk_conversion_guests_hashed_customer_id_hashed_customers'), 'conversion_guests', type_='foreignkey')
|
||||||
|
op.drop_column('conversion_guests', 'hashed_customer_id')
|
||||||
|
op.drop_index(op.f('ix_conversions_hashed_customer_id'), table_name='conversions')
|
||||||
|
op.drop_constraint(op.f('conversions_hashed_customer_id_fkey'), 'conversions', type_='foreignkey')
|
||||||
|
op.drop_column('conversions', 'hashed_customer_id')
|
||||||
|
op.drop_table('hashed_customers')
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column('conversions', sa.Column('hashed_customer_id', sa.INTEGER(), autoincrement=False, nullable=True))
|
||||||
|
op.create_foreign_key(op.f('conversions_hashed_customer_id_fkey'), 'conversions', 'hashed_customers', ['hashed_customer_id'], ['id'])
|
||||||
|
op.create_index(op.f('ix_conversions_hashed_customer_id'), 'conversions', ['hashed_customer_id'], unique=False)
|
||||||
|
op.add_column('conversion_guests', sa.Column('hashed_customer_id', sa.INTEGER(), autoincrement=False, nullable=True))
|
||||||
|
op.create_foreign_key(op.f('fk_conversion_guests_hashed_customer_id_hashed_customers'), 'conversion_guests', 'hashed_customers', ['hashed_customer_id'], ['id'])
|
||||||
|
op.create_index(op.f('ix_conversion_guests_hashed_customer_id'), 'conversion_guests', ['hashed_customer_id'], unique=False)
|
||||||
|
op.create_table('hashed_customers',
|
||||||
|
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
|
||||||
|
sa.Column('customer_id', sa.INTEGER(), autoincrement=False, nullable=True),
|
||||||
|
sa.Column('contact_id', sa.VARCHAR(), autoincrement=False, nullable=True),
|
||||||
|
sa.Column('hashed_email', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
|
||||||
|
sa.Column('hashed_phone', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
|
||||||
|
sa.Column('hashed_given_name', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
|
||||||
|
sa.Column('hashed_surname', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
|
||||||
|
sa.Column('hashed_city', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
|
||||||
|
sa.Column('hashed_postal_code', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
|
||||||
|
sa.Column('hashed_country_code', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
|
||||||
|
sa.Column('hashed_gender', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
|
||||||
|
sa.Column('hashed_birth_date', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
|
||||||
|
sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['customer_id'], ['customers.id'], name=op.f('hashed_customers_customer_id_fkey'), ondelete='SET NULL'),
|
||||||
|
sa.PrimaryKeyConstraint('id', name=op.f('hashed_customers_pkey')),
|
||||||
|
sa.UniqueConstraint('contact_id', name=op.f('uq_hashed_customers_contact_id'), postgresql_include=[], postgresql_nulls_not_distinct=False),
|
||||||
|
sa.UniqueConstraint('customer_id', name=op.f('uq_hashed_customers_customer_id'), postgresql_include=[], postgresql_nulls_not_distinct=False)
|
||||||
|
)
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
"""add conversions→conversion_guests fk
|
||||||
|
|
||||||
|
Revision ID: 263bed87114f
|
||||||
|
Revises: 3147e421bc47
|
||||||
|
Create Date: 2025-12-03 12:25:12.820232
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '263bed87114f'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = '3147e421bc47'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_foreign_key('fk_conversions_guest', 'conversions', 'conversion_guests', ['hotel_id', 'guest_id'], ['hotel_id', 'guest_id'])
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_constraint('fk_conversions_guest', 'conversions', type_='foreignkey')
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
"""boolean to signify awarness match in guests
|
||||||
|
|
||||||
|
Revision ID: 1daea5172a03
|
||||||
|
Revises: 263bed87114f
|
||||||
|
Create Date: 2025-12-03 17:44:29.657898
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '1daea5172a03'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = '263bed87114f'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column('conversion_guests', sa.Column('is_awareness_guest', sa.Boolean(), nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column('conversion_guests', 'is_awareness_guest')
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -392994,3 +392994,234 @@ DETAIL: Key (hotel_id, guest_id)=(39054_001, 28275) is not present in table "co
|
|||||||
2025-11-25 12:03:35 - alpine_bits_python.api - INFO - Email service shut down
|
2025-11-25 12:03:35 - alpine_bits_python.api - INFO - Email service shut down
|
||||||
2025-11-25 12:03:35 - alpine_bits_python.api - INFO - Application shutdown complete
|
2025-11-25 12:03:35 - alpine_bits_python.api - INFO - Application shutdown complete
|
||||||
2025-11-25 12:03:35 - alpine_bits_python.worker_coordination - INFO - Released primary worker lock (pid=22943)
|
2025-11-25 12:03:35 - alpine_bits_python.worker_coordination - INFO - Released primary worker lock (pid=22943)
|
||||||
|
2025-12-03 08:59:46 - root - INFO - Logging to file: config/alpinebits.log
|
||||||
|
2025-12-03 08:59:46 - root - INFO - Logging configured at INFO level
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.notification_service - INFO - Registered notification backend: pushover
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.notification_manager - INFO - Registered pushover backend with priority 0
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.notification_manager - INFO - Notification service configured with backends: ['pushover']
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.api - INFO - Application startup initiated (primary_worker=True)
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.db - INFO - Configured database schema: alpinebits
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.db - INFO - Setting PostgreSQL search_path to: alpinebits,public
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_HOTEL_NOTIF_REPORT
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_PING
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_HOTEL_RES_NOTIF_GUEST_REQUESTS
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_READ
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.webhook_processor - INFO - Registered webhook processor: wix_form
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.webhook_processor - INFO - Registered webhook processor: generic
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.webhook_processor - INFO - Webhook processors initialized
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.api - INFO - Webhook processors initialized
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.api - INFO - Hotel 39054_001 has no push_endpoint configured
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.api - INFO - Hotel 135 has no push_endpoint configured
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.api - INFO - Hotel 39052_001 has no push_endpoint configured
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.api - INFO - Hotel 39040_001 has no push_endpoint configured
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.api - INFO - Running startup tasks (primary worker)...
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.hotel_service - INFO - Config sync complete: 0 hotels created, 4 updated, 0 endpoints created
|
||||||
|
2025-12-03 08:59:46 - alpine_bits_python.db_setup - INFO - Config sync: 0 hotels created, 4 updated, 0 endpoints created
|
||||||
|
2025-12-03 08:59:47 - alpine_bits_python.db_setup - INFO - Backfilling advertising account IDs for existing reservations...
|
||||||
|
2025-12-03 08:59:47 - alpine_bits_python.db_setup - INFO - Found 4 hotel(s) with account configurations
|
||||||
|
2025-12-03 08:59:47 - alpine_bits_python.db_setup - INFO - Backfilling usernames for existing acked_requests...
|
||||||
|
2025-12-03 08:59:47 - alpine_bits_python.db_setup - INFO - Found 4 hotel(s) with usernames in config
|
||||||
|
2025-12-03 08:59:47 - alpine_bits_python.db_setup - INFO - Checking for stuck webhooks to reprocess...
|
||||||
|
2025-12-03 08:59:47 - alpine_bits_python.db_setup - INFO - No stuck webhooks found
|
||||||
|
2025-12-03 08:59:47 - alpine_bits_python.api - INFO - Startup tasks completed
|
||||||
|
2025-12-03 08:59:47 - alpine_bits_python.api - INFO - Webhook periodic cleanup task started
|
||||||
|
2025-12-03 08:59:47 - alpine_bits_python.api - INFO - Application startup complete
|
||||||
|
2025-12-03 08:59:51 - alpine_bits_python.api - INFO - Application shutdown initiated
|
||||||
|
2025-12-03 08:59:51 - alpine_bits_python.api - INFO - Webhook cleanup task cancelled
|
||||||
|
2025-12-03 08:59:51 - alpine_bits_python.api - INFO - Webhook cleanup task stopped
|
||||||
|
2025-12-03 08:59:51 - alpine_bits_python.email_service - INFO - Shutting down email service thread pool
|
||||||
|
2025-12-03 08:59:51 - alpine_bits_python.email_service - INFO - Email service thread pool shut down complete
|
||||||
|
2025-12-03 08:59:51 - alpine_bits_python.api - INFO - Email service shut down
|
||||||
|
2025-12-03 08:59:51 - alpine_bits_python.api - INFO - Application shutdown complete
|
||||||
|
2025-12-03 08:59:51 - alpine_bits_python.worker_coordination - INFO - Released primary worker lock (pid=9801)
|
||||||
|
2025-12-03 10:38:22 - root - INFO - Logging to file: config/alpinebits.log
|
||||||
|
2025-12-03 10:38:22 - root - INFO - Logging configured at INFO level
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.notification_service - INFO - Registered notification backend: pushover
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.notification_manager - INFO - Registered pushover backend with priority 0
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.notification_manager - INFO - Notification service configured with backends: ['pushover']
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.api - INFO - Application startup initiated (primary_worker=True)
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.db - INFO - Configured database schema: alpinebits
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.db - INFO - Setting PostgreSQL search_path to: alpinebits,public
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_HOTEL_NOTIF_REPORT
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_PING
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_HOTEL_RES_NOTIF_GUEST_REQUESTS
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_READ
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.webhook_processor - INFO - Registered webhook processor: wix_form
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.webhook_processor - INFO - Registered webhook processor: generic
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.webhook_processor - INFO - Webhook processors initialized
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.api - INFO - Webhook processors initialized
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.api - INFO - Hotel 39054_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.api - INFO - Hotel 135 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.api - INFO - Hotel 39052_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.api - INFO - Hotel 39040_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.api - INFO - Running startup tasks (primary worker)...
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.hotel_service - INFO - Created hotel: 39054_001
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.hotel_service - INFO - Created webhook endpoint for hotel 39054_001, type=wix_form, secret=JWreZtpYZIMDALw71zlLStFcQFdZbBXGGhVd379GX6oeDJE2iZLebCi0Sw2d8A0T
|
||||||
|
2025-12-03 10:38:22 - alpine_bits_python.hotel_service - INFO - Created webhook endpoint for hotel 39054_001, type=generic, secret=BzBT1xmoHA4EIpupE8YOY2r9dfWG4FJY7pEU4eDD_5RW3cKRRMJXLp6JRlY3Egr3
|
||||||
|
2025-12-03 10:38:23 - alpine_bits_python.hotel_service - INFO - Created hotel: 135
|
||||||
|
2025-12-03 10:38:23 - alpine_bits_python.hotel_service - INFO - Created webhook endpoint for hotel 135, type=wix_form, secret=0vbn5mCJBIRcHtK2DS9AWFebF8LncbpcR0sDJ7zctD3wWgdPZLdiIO-743HwiljT
|
||||||
|
2025-12-03 10:38:23 - alpine_bits_python.hotel_service - INFO - Created webhook endpoint for hotel 135, type=generic, secret=ci12B1Q81uvSwpyHppL5n1T5tYRXeJnv2cP4OkWH2FoShlMCYWEuvkmxdLhvR50N
|
||||||
|
2025-12-03 10:38:23 - alpine_bits_python.hotel_service - INFO - Created hotel: 39052_001
|
||||||
|
2025-12-03 10:38:23 - alpine_bits_python.hotel_service - INFO - Created webhook endpoint for hotel 39052_001, type=wix_form, secret=V4BcT_XGcGJg7hcHhH2IVupcW4u231R711tdI-eiv15a-cSyaMlRnqrhUqNh0csC
|
||||||
|
2025-12-03 10:38:23 - alpine_bits_python.hotel_service - INFO - Created webhook endpoint for hotel 39052_001, type=generic, secret=x1M6_NYYXrHEC3aXFPkyglprNC6U5OhBFT4TW9E8SmEnpSRq0xm_ApWv4-Vl-pe3
|
||||||
|
2025-12-03 10:38:23 - alpine_bits_python.hotel_service - INFO - Created hotel: 39040_001
|
||||||
|
2025-12-03 10:38:23 - alpine_bits_python.hotel_service - INFO - Created webhook endpoint for hotel 39040_001, type=wix_form, secret=5JMgT0EI0CnRgp7jaHE1rCHQwZFMv1t9wn1yWJEBR5j_2Zrcqz_4W5g6pJBvZw4l
|
||||||
|
2025-12-03 10:38:23 - alpine_bits_python.hotel_service - INFO - Created webhook endpoint for hotel 39040_001, type=generic, secret=lrYRwnHMq5B1I_XEH7cUoOPx95zzzfrmJcRoh9C_Rd-WD3kl4F0M-UNetAlRbMVU
|
||||||
|
2025-12-03 10:38:23 - alpine_bits_python.hotel_service - INFO - Config sync complete: 4 hotels created, 0 updated, 8 endpoints created
|
||||||
|
2025-12-03 10:38:23 - alpine_bits_python.db_setup - INFO - Config sync: 4 hotels created, 0 updated, 8 endpoints created
|
||||||
|
2025-12-03 10:38:24 - alpine_bits_python.db_setup - INFO - Backfilling advertising account IDs for existing reservations...
|
||||||
|
2025-12-03 10:38:24 - alpine_bits_python.db_setup - INFO - Found 4 hotel(s) with account configurations
|
||||||
|
2025-12-03 10:38:40 - root - INFO - Logging to file: config/alpinebits.log
|
||||||
|
2025-12-03 10:38:40 - root - INFO - Logging configured at INFO level
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.notification_service - INFO - Registered notification backend: pushover
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.notification_manager - INFO - Registered pushover backend with priority 0
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.notification_manager - INFO - Notification service configured with backends: ['pushover']
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.api - INFO - Application startup initiated (primary_worker=True)
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.db - INFO - Configured database schema: alpinebits
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.db - INFO - Setting PostgreSQL search_path to: alpinebits,public
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_HOTEL_NOTIF_REPORT
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_PING
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_HOTEL_RES_NOTIF_GUEST_REQUESTS
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_READ
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.webhook_processor - INFO - Registered webhook processor: wix_form
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.webhook_processor - INFO - Registered webhook processor: generic
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.webhook_processor - INFO - Webhook processors initialized
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.api - INFO - Webhook processors initialized
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.api - INFO - Hotel 39054_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.api - INFO - Hotel 135 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.api - INFO - Hotel 39052_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.api - INFO - Hotel 39040_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.api - INFO - Running startup tasks (primary worker)...
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.hotel_service - INFO - Config sync complete: 0 hotels created, 4 updated, 0 endpoints created
|
||||||
|
2025-12-03 10:38:40 - alpine_bits_python.db_setup - INFO - Config sync: 0 hotels created, 4 updated, 0 endpoints created
|
||||||
|
2025-12-03 10:38:41 - alpine_bits_python.db_setup - INFO - Backfilling advertising account IDs for existing reservations...
|
||||||
|
2025-12-03 10:38:41 - alpine_bits_python.db_setup - INFO - Found 4 hotel(s) with account configurations
|
||||||
|
2025-12-03 10:38:53 - root - INFO - Logging to file: config/alpinebits.log
|
||||||
|
2025-12-03 10:38:53 - root - INFO - Logging configured at INFO level
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.notification_service - INFO - Registered notification backend: pushover
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.notification_manager - INFO - Registered pushover backend with priority 0
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.notification_manager - INFO - Notification service configured with backends: ['pushover']
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.api - INFO - Application startup initiated (primary_worker=True)
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.db - INFO - Configured database schema: alpinebits
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.db - INFO - Setting PostgreSQL search_path to: alpinebits,public
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_HOTEL_NOTIF_REPORT
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_PING
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_HOTEL_RES_NOTIF_GUEST_REQUESTS
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_READ
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.webhook_processor - INFO - Registered webhook processor: wix_form
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.webhook_processor - INFO - Registered webhook processor: generic
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.webhook_processor - INFO - Webhook processors initialized
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.api - INFO - Webhook processors initialized
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.api - INFO - Hotel 39054_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.api - INFO - Hotel 135 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.api - INFO - Hotel 39052_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.api - INFO - Hotel 39040_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.api - INFO - Running startup tasks (primary worker)...
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.hotel_service - INFO - Config sync complete: 0 hotels created, 4 updated, 0 endpoints created
|
||||||
|
2025-12-03 10:38:53 - alpine_bits_python.db_setup - INFO - Config sync: 0 hotels created, 4 updated, 0 endpoints created
|
||||||
|
2025-12-03 10:38:54 - alpine_bits_python.db_setup - INFO - Backfilling advertising account IDs for existing reservations...
|
||||||
|
2025-12-03 10:38:54 - alpine_bits_python.db_setup - INFO - Found 4 hotel(s) with account configurations
|
||||||
|
2025-12-03 10:38:54 - alpine_bits_python.db_setup - INFO - Backfilling usernames for existing acked_requests...
|
||||||
|
2025-12-03 10:38:54 - alpine_bits_python.db_setup - INFO - Found 4 hotel(s) with usernames in config
|
||||||
|
2025-12-03 10:38:54 - alpine_bits_python.db_setup - INFO - Checking for stuck webhooks to reprocess...
|
||||||
|
2025-12-03 10:38:54 - alpine_bits_python.db_setup - INFO - No stuck webhooks found
|
||||||
|
2025-12-03 10:38:54 - alpine_bits_python.api - INFO - Startup tasks completed
|
||||||
|
2025-12-03 10:38:54 - alpine_bits_python.api - INFO - Webhook periodic cleanup task started
|
||||||
|
2025-12-03 10:38:54 - alpine_bits_python.api - INFO - Application startup complete
|
||||||
|
2025-12-03 10:39:31 - alpine_bits_python.api - INFO - Application shutdown initiated
|
||||||
|
2025-12-03 10:39:31 - alpine_bits_python.api - INFO - Webhook cleanup task cancelled
|
||||||
|
2025-12-03 10:39:31 - alpine_bits_python.api - INFO - Webhook cleanup task stopped
|
||||||
|
2025-12-03 10:39:31 - alpine_bits_python.email_service - INFO - Shutting down email service thread pool
|
||||||
|
2025-12-03 10:39:31 - alpine_bits_python.email_service - INFO - Email service thread pool shut down complete
|
||||||
|
2025-12-03 10:39:31 - alpine_bits_python.api - INFO - Email service shut down
|
||||||
|
2025-12-03 10:39:31 - alpine_bits_python.api - INFO - Application shutdown complete
|
||||||
|
2025-12-03 10:39:31 - alpine_bits_python.worker_coordination - INFO - Released primary worker lock (pid=34567)
|
||||||
|
2025-12-03 10:39:34 - root - INFO - Logging to file: config/alpinebits.log
|
||||||
|
2025-12-03 10:39:34 - root - INFO - Logging configured at INFO level
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.notification_service - INFO - Registered notification backend: pushover
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.notification_manager - INFO - Registered pushover backend with priority 0
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.notification_manager - INFO - Notification service configured with backends: ['pushover']
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.api - INFO - Application startup initiated (primary_worker=True)
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.db - INFO - Configured database schema: alpinebits
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.db - INFO - Setting PostgreSQL search_path to: alpinebits,public
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_HOTEL_NOTIF_REPORT
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_PING
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_HOTEL_RES_NOTIF_GUEST_REQUESTS
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.alpinebits_server - INFO - Initializing action instance for AlpineBitsActionName.OTA_READ
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.webhook_processor - INFO - Registered webhook processor: wix_form
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.webhook_processor - INFO - Registered webhook processor: generic
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.webhook_processor - INFO - Webhook processors initialized
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.api - INFO - Webhook processors initialized
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.api - INFO - Hotel 39054_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.api - INFO - Hotel 135 has no push_endpoint configured
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.api - INFO - Hotel 39052_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.api - INFO - Hotel 39040_001 has no push_endpoint configured
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.api - INFO - Running startup tasks (primary worker)...
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.hotel_service - INFO - Config sync complete: 0 hotels created, 4 updated, 0 endpoints created
|
||||||
|
2025-12-03 10:39:34 - alpine_bits_python.db_setup - INFO - Config sync: 0 hotels created, 4 updated, 0 endpoints created
|
||||||
|
2025-12-03 10:39:35 - alpine_bits_python.db_setup - INFO - Backfilling advertising account IDs for existing reservations...
|
||||||
|
2025-12-03 10:39:35 - alpine_bits_python.db_setup - INFO - Found 4 hotel(s) with account configurations
|
||||||
|
2025-12-03 10:39:35 - alpine_bits_python.db_setup - INFO - Backfilling usernames for existing acked_requests...
|
||||||
|
2025-12-03 10:39:35 - alpine_bits_python.db_setup - INFO - Found 4 hotel(s) with usernames in config
|
||||||
|
2025-12-03 10:39:35 - alpine_bits_python.db_setup - INFO - Checking for stuck webhooks to reprocess...
|
||||||
|
2025-12-03 10:39:35 - alpine_bits_python.db_setup - INFO - No stuck webhooks found
|
||||||
|
2025-12-03 10:39:35 - alpine_bits_python.api - INFO - Startup tasks completed
|
||||||
|
2025-12-03 10:39:35 - alpine_bits_python.api - INFO - Webhook periodic cleanup task started
|
||||||
|
2025-12-03 10:39:35 - alpine_bits_python.api - INFO - Application startup complete
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.api - INFO - AlpineBits authentication successful for user: bemelman (from config)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.api - INFO - XML file queued for processing: logs/conversions_import/file_bemelman_20251203_103950.xml by user bemelman (original: file.xml)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.api - INFO - Starting database processing of file.xml
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Loaded 1764 reservations into cache
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Reservation cache initialized with 6 hotel codes
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Processing deleted reservation: Hotel 39054_001, PMS ID 74423
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Processing 32 reservations in xml
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Phase 0: Extracted 24 unique guests from 32 reservations
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Phase 1: Successfully upserted 24 guests
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 32770 (pms_id=65675)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 36046 (pms_id=71642)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 34158 (pms_id=68197)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 36811 (pms_id=73332)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 35904 (pms_id=71360)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37465 (pms_id=74400)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37466 (pms_id=74401)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37471 (pms_id=74406)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37472 (pms_id=74407)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37473 (pms_id=74408)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37474 (pms_id=74409)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37475 (pms_id=74410)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37476 (pms_id=74412)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37477 (pms_id=74411)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37478 (pms_id=74413)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37479 (pms_id=74414)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37398 (pms_id=74315)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37212 (pms_id=74028)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37480 (pms_id=74415)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37210 (pms_id=74027)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37481 (pms_id=74416)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37483 (pms_id=74417)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37446 (pms_id=74380)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37437 (pms_id=74369)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37484 (pms_id=74418)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37482 (pms_id=74419)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37486 (pms_id=74420)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37487 (pms_id=74421)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37489 (pms_id=74422)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37485 (pms_id=74424)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37488 (pms_id=74425)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Updated conversion 37490 (pms_id=74426)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Phase 3a: Matched conversion by advertising ID (pms_id=74401, reservation_id=1736)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Phase 3a: Matched conversion by advertising ID (pms_id=74411, reservation_id=1751)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Phase 3a: Matched conversion by advertising ID (pms_id=74027, reservation_id=503)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Phase 3a: Matched conversion by advertising ID (pms_id=74028, reservation_id=503)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Phase 3a: Matched conversion by advertising ID (pms_id=74413, reservation_id=1749)
|
||||||
|
2025-12-03 10:39:50 - alpine_bits_python.conversion_service - INFO - Phase 3a: Matched conversion by advertising ID (pms_id=74424, reservation_id=1754)
|
||||||
|
2025-12-03 10:39:54 - alpine_bits_python.conversion_service - INFO - Phase 3b: Found 22138 unique guests from 34438 unmatched conversions
|
||||||
|
2025-12-03 10:40:12 - alpine_bits_python.api - INFO - Conversion processing complete for file.xml: {'total_reservations': 32, 'deleted_reservations': 1, 'total_daily_sales': 501, 'matched_to_reservation': 6, 'matched_to_customer': 0, 'matched_to_hashed_customer': 0, 'unmatched': 26, 'errors': 0}
|
||||||
|
2025-12-03 10:41:22 - alpine_bits_python.api - INFO - Application shutdown initiated
|
||||||
|
2025-12-03 10:41:22 - alpine_bits_python.api - INFO - Webhook cleanup task cancelled
|
||||||
|
2025-12-03 10:41:22 - alpine_bits_python.api - INFO - Webhook cleanup task stopped
|
||||||
|
2025-12-03 10:41:22 - alpine_bits_python.email_service - INFO - Shutting down email service thread pool
|
||||||
|
2025-12-03 10:41:22 - alpine_bits_python.email_service - INFO - Email service thread pool shut down complete
|
||||||
|
2025-12-03 10:41:22 - alpine_bits_python.api - INFO - Email service shut down
|
||||||
|
2025-12-03 10:41:22 - alpine_bits_python.api - INFO - Application shutdown complete
|
||||||
|
2025-12-03 10:41:22 - alpine_bits_python.worker_coordination - INFO - Released primary worker lock (pid=34833)
|
||||||
|
|||||||
396
database_schema_analysis.md
Normal file
396
database_schema_analysis.md
Normal file
@@ -0,0 +1,396 @@
|
|||||||
|
# Database Schema Analysis
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
This document analyzes the database schema for normalization issues, redundancy, and potential improvements.
|
||||||
|
|
||||||
|
## Schema Summary
|
||||||
|
The database contains 13 tables organized around several core concepts:
|
||||||
|
- **Customer/Guest Management**: `customers`, `hashed_customers`, `conversion_guests`
|
||||||
|
- **Reservations**: `reservations`, `conversions`, `conversion_rooms`
|
||||||
|
- **Hotels**: `hotels`, `hotel_inventory`, `room_availability`
|
||||||
|
- **Webhooks**: `webhook_endpoints`, `webhook_requests`
|
||||||
|
- **Tracking**: `acked_requests`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Major Issues Identified
|
||||||
|
|
||||||
|
### 1. **CRITICAL: Dual Customer Systems (Data Duplication)**
|
||||||
|
|
||||||
|
**Problem**: The schema maintains two parallel customer tracking systems:
|
||||||
|
- `customers` + `hashed_customers` (from Wix forms)
|
||||||
|
- `conversion_guests` (from PMS)
|
||||||
|
|
||||||
|
**Impact**:
|
||||||
|
- Same person can exist in both systems with no linkage
|
||||||
|
- `conversion_guests.hashed_customer_id` attempts to link but this is backward (many-to-one instead of one-to-one)
|
||||||
|
- Data inconsistency when same guest appears in both sources
|
||||||
|
|
||||||
|
**Details**:
|
||||||
|
```
|
||||||
|
customers (id=1, email="john@example.com")
|
||||||
|
└─ hashed_customers (id=1, customer_id=1, hashed_email="abc123...")
|
||||||
|
|
||||||
|
conversion_guests (hotel_id="HOTEL1", guest_id=42, guest_email="john@example.com")
|
||||||
|
└─ hashed_customer_id = NULL (or points to hashed_customers.id=1 after matching)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Recommendation**:
|
||||||
|
- Create a unified `persons` table with a `source` field ("wix", "pms", "merged")
|
||||||
|
- Both `customers` and `conversion_guests` should reference this unified entity
|
||||||
|
- Implement proper guest matching/merging logic
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 2. **Data Redundancy: Hashed Values Stored Separately**
|
||||||
|
|
||||||
|
**Problem**: `hashed_customers` and `conversion_guests` store hashed values in separate columns alongside originals.
|
||||||
|
|
||||||
|
**Current Structure**:
|
||||||
|
```
|
||||||
|
customers:
|
||||||
|
- email_address (plaintext)
|
||||||
|
- phone (plaintext)
|
||||||
|
|
||||||
|
hashed_customers:
|
||||||
|
- customer_id (FK to customers)
|
||||||
|
- hashed_email
|
||||||
|
- hashed_phone
|
||||||
|
- hashed_given_name
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
**Issues**:
|
||||||
|
- Violates 3NF (derived data stored in separate table)
|
||||||
|
- Synchronization required between `customers` and `hashed_customers`
|
||||||
|
- If customer data changes, hashed version can become stale
|
||||||
|
- Extra JOIN required for every Meta Conversion API call
|
||||||
|
|
||||||
|
**Better Approach**:
|
||||||
|
Option A: Store hashed values directly in `customers` table as additional columns
|
||||||
|
Option B: Compute hashes on-the-fly (SHA256 is fast, ~1-2ms per hash)
|
||||||
|
|
||||||
|
**Recommendation**:
|
||||||
|
- **Short term**: Keep current structure but add triggers to auto-update hashed values
|
||||||
|
- **Long term**: Move hashed columns into `customers` table directly
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3. **Advertising Account IDs Duplicated Across Tables**
|
||||||
|
|
||||||
|
**Problem**: `meta_account_id` and `google_account_id` appear in 3 places:
|
||||||
|
- `hotels` table (canonical source)
|
||||||
|
- `reservations` table (copied at creation time)
|
||||||
|
- Derived from `fbclid`/`gclid` tracking parameters
|
||||||
|
|
||||||
|
**Current Flow**:
|
||||||
|
```
|
||||||
|
hotels.meta_account_id = "123456"
|
||||||
|
↓
|
||||||
|
reservation created with fbclid
|
||||||
|
↓
|
||||||
|
reservations.meta_account_id = "123456" (copied from hotels)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Issues**:
|
||||||
|
- Denormalization without clear benefit
|
||||||
|
- If hotel's account ID changes, old reservations have stale data
|
||||||
|
- Mixed source of truth (sometimes from hotels, sometimes from tracking params)
|
||||||
|
|
||||||
|
**Recommendation**:
|
||||||
|
- Remove `meta_account_id` and `google_account_id` from `reservations`
|
||||||
|
- Always derive from `hotels` table via JOIN
|
||||||
|
- If tracking-derived account differs from hotel's account, log a warning
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 4. **Hotel Information Duplicated in Reservations**
|
||||||
|
|
||||||
|
**Problem**: `reservations` table stores `hotel_code` and `hotel_name` but has no FK to `hotels` table.
|
||||||
|
|
||||||
|
**Issues**:
|
||||||
|
- Data can become inconsistent if hotel name changes
|
||||||
|
- No referential integrity
|
||||||
|
- Unclear if `hotel_code` matches `hotels.hotel_id`
|
||||||
|
|
||||||
|
**Recommendation**:
|
||||||
|
- Add `hotel_id` FK column to `reservations` pointing to `hotels.hotel_id`
|
||||||
|
- Remove `hotel_code` and `hotel_name` columns
|
||||||
|
- Derive hotel information via JOIN when needed
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 5. **Weak Foreign Key Consistency**
|
||||||
|
|
||||||
|
**Problem**: Mixed use of `ON DELETE` policies:
|
||||||
|
- Some FKs use `SET NULL` (appropriate for nullable relationships)
|
||||||
|
- Some use `CASCADE` (appropriate for child records)
|
||||||
|
- Some use `NO ACTION` (prevents deletion, may cause issues)
|
||||||
|
- `conversions` table has confusing composite FK setup with `hotel_id` and `guest_id`
|
||||||
|
|
||||||
|
**Examples**:
|
||||||
|
```sql
|
||||||
|
-- Good: Child data should be deleted with parent
|
||||||
|
hotel_inventory.hotel_id → hotels.hotel_id (ON DELETE CASCADE)
|
||||||
|
|
||||||
|
-- Questionable: Should webhook requests survive hotel deletion?
|
||||||
|
webhook_requests.hotel_id → hotels.hotel_id (ON DELETE NO ACTION)
|
||||||
|
|
||||||
|
-- Inconsistent: Why SET NULL vs CASCADE?
|
||||||
|
reservations.customer_id → customers.id (ON DELETE SET NULL)
|
||||||
|
reservations.hashed_customer_id → hashed_customers.id (ON DELETE CASCADE)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Recommendation**:
|
||||||
|
Review each FK and establish consistent policies:
|
||||||
|
- Core data (hotels, customers): SET NULL to preserve historical records
|
||||||
|
- Supporting data (hashed_customers, inventory): CASCADE
|
||||||
|
- Transactional data (webhooks, conversions): Decide on retention policy
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 6. **Confusing Composite Foreign Key in Conversions**
|
||||||
|
|
||||||
|
**Problem**: The `conversions` table has a composite FK that's incorrectly mapped:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# In db.py lines 650-655
|
||||||
|
__table_args__ = (
|
||||||
|
ForeignKeyConstraint(
|
||||||
|
["hotel_id", "guest_id"],
|
||||||
|
["conversion_guests.hotel_id", "conversion_guests.guest_id"],
|
||||||
|
ondelete="SET NULL",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
**But the database shows**:
|
||||||
|
```
|
||||||
|
Foreign Keys:
|
||||||
|
hotel_id -> conversion_guests.hotel_id (ON DELETE SET NULL)
|
||||||
|
guest_id -> conversion_guests.hotel_id (ON DELETE SET NULL) # ← WRONG!
|
||||||
|
guest_id -> conversion_guests.guest_id (ON DELETE SET NULL)
|
||||||
|
hotel_id -> conversion_guests.guest_id (ON DELETE SET NULL) # ← WRONG!
|
||||||
|
```
|
||||||
|
|
||||||
|
**Impact**:
|
||||||
|
- Database has 4 FKs instead of 1 composite FK
|
||||||
|
- Mapping is incorrect (guest_id → hotel_id doesn't make sense)
|
||||||
|
- Could cause constraint violations or allow orphaned records
|
||||||
|
|
||||||
|
**Recommendation**:
|
||||||
|
- Fix the composite FK definition in SQLAlchemy
|
||||||
|
- Run a migration to drop incorrect FKs and recreate properly
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 7. **Unclear Relationship Between Reservations and Conversions**
|
||||||
|
|
||||||
|
**Problem**: The relationship between `reservations` (from Wix forms) and `conversions` (from PMS) is complex:
|
||||||
|
|
||||||
|
```
|
||||||
|
conversions:
|
||||||
|
- reservation_id (FK to reservations) - matched by tracking IDs
|
||||||
|
- customer_id (FK to customers) - matched by guest details
|
||||||
|
- hashed_customer_id (FK to hashed_customers) - matched by hashed guest details
|
||||||
|
- guest_id (FK to conversion_guests) - the actual PMS guest
|
||||||
|
```
|
||||||
|
|
||||||
|
**Issues**:
|
||||||
|
- Three different FK fields to three different customer/guest tables
|
||||||
|
- Matching logic is unclear from schema alone
|
||||||
|
- `directly_attributable` and `guest_matched` flags indicate matching quality, but this should be more explicit
|
||||||
|
|
||||||
|
**Recommendation**:
|
||||||
|
- Add a `match_confidence` enum field: "exact_id", "high_confidence", "medium_confidence", "no_match"
|
||||||
|
- Add `match_method` field to explain how the link was made
|
||||||
|
- Consider a separate `reservation_conversion_links` table to make the many-to-many relationship explicit
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 8. **Room Type Information Scattered**
|
||||||
|
|
||||||
|
**Problem**: Room information appears in multiple places:
|
||||||
|
- `reservations.room_type_code`, `room_classification_code`, `room_type`
|
||||||
|
- `conversion_rooms.room_type`, `room_number`
|
||||||
|
- `hotel_inventory.inv_type_code`, `inv_code`, `room_name`
|
||||||
|
|
||||||
|
**Issues**:
|
||||||
|
- No clear master data for room types
|
||||||
|
- Room type codes not standardized across sources
|
||||||
|
- No FK between `reservations.room_type_code` and `hotel_inventory.inv_type_code`
|
||||||
|
|
||||||
|
**Recommendation**:
|
||||||
|
- Create a `room_types` reference table linked to hotels
|
||||||
|
- Add FKs from reservations and conversion_rooms to room_types
|
||||||
|
- Standardize room type codes across all sources
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Normalization Analysis
|
||||||
|
|
||||||
|
### 1st Normal Form (1NF): ✅ PASS
|
||||||
|
- All columns contain atomic values
|
||||||
|
- **Exception**: `reservations.children_ages` stores comma-separated values
|
||||||
|
- Should be: separate `reservation_children` table with age column
|
||||||
|
|
||||||
|
### 2nd Normal Form (2NF): ⚠️ MOSTLY PASS
|
||||||
|
- All non-key attributes depend on the full primary key
|
||||||
|
- **Issue**: Some denormalized data exists (hotel names, account IDs in reservations)
|
||||||
|
|
||||||
|
### 3rd Normal Form (3NF): ❌ FAIL
|
||||||
|
Multiple violations:
|
||||||
|
- `hashed_customers` stores derived data (hashes) that depend on `customers`
|
||||||
|
- `reservations.meta_account_id` depends on `hotels` via hotel_code
|
||||||
|
- `reservations.hotel_name` depends on `hotels` via hotel_code
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Data Integrity Issues
|
||||||
|
|
||||||
|
### Missing Foreign Keys
|
||||||
|
1. **reservations.hotel_code** → should FK to hotels.hotel_id
|
||||||
|
2. **reservations.room_type_code** → should FK to hotel_inventory
|
||||||
|
3. **acked_requests.unique_id** → should FK to reservations.unique_id (or be nullable)
|
||||||
|
|
||||||
|
### Missing Indexes
|
||||||
|
Consider adding for query performance:
|
||||||
|
1. `customers.email_address` - for lookups during conversion matching
|
||||||
|
2. `conversions.reservation_date` - for time-based queries
|
||||||
|
3. `conversion_rooms.total_revenue` - for revenue analytics
|
||||||
|
4. `reservations.start_date`, `end_date` - for date range queries
|
||||||
|
|
||||||
|
### Missing Constraints
|
||||||
|
1. **Check constraints** for date logic:
|
||||||
|
- `reservations.end_date > start_date`
|
||||||
|
- `conversion_rooms.departure_date > arrival_date`
|
||||||
|
|
||||||
|
2. **Check constraints** for counts:
|
||||||
|
- `num_adults >= 0`, `num_children >= 0`
|
||||||
|
|
||||||
|
3. **NOT NULL constraints** on critical fields:
|
||||||
|
- `customers.contact_id` should be NOT NULL (it's the natural key)
|
||||||
|
- `conversions.hotel_id` is NOT NULL ✓ (good)
|
||||||
|
|
||||||
|
---

## Recommendations Priority

### HIGH PRIORITY (Data Integrity)

1. Fix composite FK in `conversions` table (lines 650-655 in db.py)
2. Add `hotel_id` FK to `reservations` table
3. Add missing NOT NULL constraints on natural keys
4. Add check constraints for date ranges and counts

### MEDIUM PRIORITY (Normalization)

5. Unify customer/guest systems into a single `persons` entity
6. Remove duplicate account ID fields from `reservations`
7. Remove `hotel_name` from `reservations` (derive via JOIN)
8. Create `reservation_children` table for children_ages

### LOW PRIORITY (Performance & Cleanup)

9. Move hashed fields into `customers` table (remove `hashed_customers`)
10. Add indexes for common query patterns
11. Create `room_types` reference table
12. Add `match_confidence` and `match_method` to `conversions` (sketched below)
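Item 12 could look like the following; the column names and types are assumptions, not part of the current schema:

```sql
ALTER TABLE conversions
    ADD COLUMN match_confidence NUMERIC(4, 3),  -- e.g. 0.000 to 1.000
    ADD COLUMN match_method VARCHAR(50);        -- e.g. 'click_id', 'hashed_email'
```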
---

## Positive Aspects

✅ Good use of composite keys (`conversion_guests`, `hotel_inventory`)
✅ Unique constraints on natural keys (`contact_id`, `webhook_secret`)
✅ Proper use of indexes on frequently queried fields
✅ Cascade deletion for child records (inventory, rooms)
✅ Tracking metadata (created_at, updated_at, first_seen, last_seen)
✅ Webhook deduplication via `payload_hash`
✅ JSON storage for flexible data (`conversion_rooms.daily_sales`)
---

## Suggested Refactoring Path

### Phase 1: Fix Critical Issues (1-2 days)

- Fix composite FK in conversions
- Add hotel_id FK to reservations
- Add missing constraints

### Phase 2: Normalize Customer Data (3-5 days)

- Create unified persons/guests table
- Migrate existing data
- Update matching logic

### Phase 3: Clean Up Redundancy (2-3 days)

- Remove duplicate account IDs
- Merge hashed_customers into customers
- Create room_types reference

### Phase 4: Enhance Tracking (1-2 days)

- Add match_confidence fields
- Improve conversion attribution
- Add missing indexes
---

## Query Examples Affected by Current Issues

### Issue: Duplicate Customer Data

```sql
-- Current: Find all reservations for a guest (requires checking both systems)
SELECT r.* FROM reservations r
WHERE r.customer_id = ?
   OR r.hashed_customer_id IN (
        SELECT id FROM hashed_customers WHERE contact_id = ?
   );

-- After fix: Simple unified query
SELECT r.* FROM reservations r
WHERE r.person_id = ?;
```

### Issue: Missing Hotel FK

```sql
-- Current: Get hotel info for reservation (unreliable)
SELECT r.*, r.hotel_name
FROM reservations r
WHERE r.id = ?;

-- After fix: Reliable JOIN
SELECT r.*, h.hotel_name, h.meta_account_id
FROM reservations r
JOIN hotels h ON r.hotel_id = h.hotel_id
WHERE r.id = ?;
```

### Issue: Hashed Data in Separate Table

```sql
-- Current: Get customer for Meta API (requires JOIN)
SELECT hc.hashed_email, hc.hashed_phone
FROM reservations r
JOIN hashed_customers hc ON r.hashed_customer_id = hc.id
WHERE r.id = ?;

-- After fix: Direct access
SELECT c.hashed_email, c.hashed_phone
FROM reservations r
JOIN customers c ON r.customer_id = c.id
WHERE r.id = ?;
```
---

## Conclusion

The schema is **functional but has significant normalization and consistency issues**. The main problems are:

1. **Dual customer tracking systems** that should be unified
2. **Redundant storage of derived data** (hashes, account IDs)
3. **Missing foreign key relationships** (hotels, room types)
4. **Inconsistent deletion policies** across foreign keys
5. **Broken composite foreign key** in conversions table

The database violates 3NF in several places and could benefit from a refactoring effort. However, the issues are primarily architectural rather than critical bugs, so the system can continue operating while improvements are made incrementally.

**Estimated effort to fix all issues**: 1-2 weeks of development + testing
**Risk level**: Medium (requires data migration and careful FK updates)
**Recommended approach**: Incremental fixes starting with high-priority items
@@ -1,47 +0,0 @@
-#!/bin/bash
-# Reset database and initialize Alembic from scratch
-
-echo "=== Database Reset Script ==="
-echo "This will drop all tables and reinitialize with Alembic"
-echo ""
-read -p "Are you sure? (type 'yes' to continue): " confirm
-
-if [ "$confirm" != "yes" ]; then
-    echo "Aborted."
-    exit 1
-fi
-
-echo ""
-echo "Step 1: Dropping all tables in the database..."
-echo "Connect to your database and run:"
-echo ""
-echo "  -- For PostgreSQL:"
-echo "  DROP SCHEMA public CASCADE;"
-echo "  CREATE SCHEMA public;"
-echo "  GRANT ALL ON SCHEMA public TO <your_user>;"
-echo "  GRANT ALL ON SCHEMA public TO public;"
-echo ""
-echo "  -- Or if using a custom schema (e.g., alpinebits):"
-echo "  DROP SCHEMA alpinebits CASCADE;"
-echo "  CREATE SCHEMA alpinebits;"
-echo ""
-echo "Press Enter after you've run the SQL commands..."
-read
-
-echo ""
-echo "Step 2: Running Alembic migrations..."
-uv run alembic upgrade head
-
-if [ $? -eq 0 ]; then
-    echo ""
-    echo "=== Success! ==="
-    echo "Database has been reset and migrations applied."
-    echo ""
-    echo "Current migration status:"
-    uv run alembic current
-else
-    echo ""
-    echo "=== Error ==="
-    echo "Migration failed. Check the error messages above."
-    exit 1
-fi
reset_db.sh (executable file, 28 lines)
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+
+# Recreate the database: run DROP and CREATE in separate psql calls (DROP DATABASE cannot run inside a transaction block)
+if ! docker exec -i meta_timescaledb psql -U meta_user -d postgres -c "DROP DATABASE IF EXISTS meta_insights;"; then
+    echo "Error: failed to drop database 'meta_insights'." >&2
+    exit 1
+fi
+
+if ! docker exec -i meta_timescaledb psql -U meta_user -d postgres -c "CREATE DATABASE meta_insights;"; then
+    echo "Error: failed to create database 'meta_insights'." >&2
+    exit 1
+fi
+
+# then import dump specified by argument only if previous commands succeeded
+if [ -n "$1" ]; then
+    DUMP_FILE="$1"
+    if [ ! -r "$DUMP_FILE" ]; then
+        echo "Error: dump file '$DUMP_FILE' does not exist or is not readable." >&2
+        exit 2
+    fi
+
+    echo "Importing dump from $DUMP_FILE"
+    if ! docker exec -i meta_timescaledb psql -U meta_user -d meta_insights < "$DUMP_FILE"; then
+        echo "Error: failed to import dump '$DUMP_FILE' into 'meta_insights'." >&2
+        exit 3
+    fi
+fi
@@ -42,7 +42,7 @@ select res.id, res.created_at, con.created_at as "Con Created at", con.updated_a
 left join alpinebits.conversions as con on con.reservation_id = res.id
 left join alpinebits.conversion_guests as g on g.guest_id = con.guest_id

-where hotel_code = '39054_001'
+where hotel_id = '39054_001'

 order by res.created_at desc limit 400
@@ -768,9 +768,9 @@ def _process_single_reservation(
     hotel_reservation_id=[hotel_res_id]
 )

-if reservation.hotel_code is None:
+if reservation.hotel_id is None:
     raise ValueError("Reservation hotel_code is None")
-hotel_code = str(reservation.hotel_code)
+hotel_code = str(reservation.hotel_id)
 hotel_name = None if reservation.hotel_name is None else str(reservation.hotel_name)

 basic_property_info = HotelReservation.ResGlobalInfo.BasicPropertyInfo(

@@ -28,7 +28,7 @@ from fastapi.security import (
 from pydantic import BaseModel
 from slowapi.errors import RateLimitExceeded
 from sqlalchemy import and_, select, update
-from sqlalchemy.ext.asyncio import async_sessionmaker
+from sqlalchemy.ext.asyncio import async_sessionmaker, AsyncSession
 from sqlalchemy.orm import selectinload

 from alpine_bits_python.hotel_service import HotelService

@@ -138,7 +138,7 @@ async def push_listener(customer: DBCustomer, reservation: DBReservation, hotel)

 server: AlpineBitsServer = app.state.alpine_bits_server
 hotel_id = hotel["hotel_id"]
-reservation_hotel_id = reservation.hotel_code
+reservation_hotel_id = reservation.hotel_id

 # Double-check hotel matching (should be guaranteed by dispatcher)
 if hotel_id != reservation_hotel_id:

@@ -719,7 +719,7 @@ async def validate_basic_auth(
 async def handle_webhook_unified(
     request: Request,
     webhook_secret: str,
-    db_session=Depends(get_async_session),
+    db_session: AsyncSession = Depends(get_async_session),
 ):
     """Unified webhook handler with deduplication and routing.
@@ -846,6 +846,9 @@ async def handle_webhook_unified(
 if not webhook_endpoint:
     raise HTTPException(status_code=404, detail="Webhook not found")

+webhook_endpoint_id = webhook_endpoint.id
+webhook_hotel_id = webhook_endpoint.hotel_id
+
 # Verify hotel is active
 if not webhook_endpoint.hotel.is_active:
     raise HTTPException(status_code=404, detail="Hotel is not active")

@@ -860,8 +863,8 @@ async def handle_webhook_unified(

 webhook_request_data = WebhookRequestData(
     payload_json=payload,
-    webhook_endpoint_id=webhook_endpoint.id,
-    hotel_id=webhook_endpoint.hotel_id,
+    webhook_endpoint_id=webhook_endpoint_id,
+    hotel_id=webhook_hotel_id,
     status=WebhookStatus.PROCESSING,
     processing_started_at=timestamp,
     created_at=timestamp,

@@ -923,12 +926,17 @@ async def handle_webhook_unified(
 db_session.add(webhook_request)
 await db_session.flush()

+webhook_request_id = webhook_request.id
+
 try:
     # 6. Get processor for webhook_type
     processor = webhook_registry.get_processor(webhook_endpoint.webhook_type)
     if not processor:
         raise ValueError(f"No processor for type: {webhook_endpoint.webhook_type}")

+    # Persist the webhook row before handing off to processors
+    await db_session.commit()
+
     # 7. Process webhook with simplified interface
     result = await processor.process(
         webhook_request=webhook_request,
@@ -937,34 +945,50 @@ async def handle_webhook_unified(
         event_dispatcher=request.app.state.event_dispatcher,
     )

-    # 8. Update status and link created entities when available
-    webhook_request.status = WebhookStatus.COMPLETED
-    webhook_request.processing_completed_at = datetime.now(UTC)
-
-    created_customer_id = result.get("customer_id") if isinstance(result, dict) else None
-    created_reservation_id = (
-        result.get("reservation_id") if isinstance(result, dict) else None
-    )
+    if not db_session.in_transaction():
+        await db_session.begin()
+
+    completion_values = {
+        "status": WebhookStatus.COMPLETED,
+        "processing_completed_at": datetime.now(UTC),
+    }

+    if isinstance(result, dict):
+        created_customer_id = result.get("customer_id")
+        created_reservation_id = result.get("reservation_id")
         if created_customer_id:
-            webhook_request.created_customer_id = created_customer_id
+            completion_values["created_customer_id"] = created_customer_id
         if created_reservation_id:
-            webhook_request.created_reservation_id = created_reservation_id
+            completion_values["created_reservation_id"] = created_reservation_id

+    await db_session.execute(
+        update(WebhookRequest)
+        .where(WebhookRequest.id == webhook_request_id)
+        .values(**completion_values)
+    )
     await db_session.commit()

     return {
         **result,
-        "webhook_id": webhook_request.id,
-        "hotel_id": webhook_endpoint.hotel_id,
+        "webhook_id": webhook_request_id,
+        "hotel_id": webhook_hotel_id,
     }

 except Exception as e:
     _LOGGER.exception("Error processing webhook: %s", e)

-    webhook_request.status = WebhookStatus.FAILED
-    webhook_request.last_error = str(e)[:2000]
-    webhook_request.processing_completed_at = datetime.now(UTC)
+    await db_session.rollback()
+    if not db_session.in_transaction():
+        await db_session.begin()
+    await db_session.execute(
+        update(WebhookRequest)
+        .where(WebhookRequest.id == webhook_request_id)
+        .values(
+            status=WebhookStatus.FAILED,
+            last_error=str(e)[:2000],
+            processing_completed_at=datetime.now(UTC),
+        )
+    )
     await db_session.commit()

     raise HTTPException(status_code=500, detail="Error processing webhook")
@@ -1188,7 +1212,9 @@ async def _process_conversion_xml_background(
 # Now process the conversion XML
 _LOGGER.info("Starting database processing of %s", filename)
 conversion_service = ConversionService(session_maker, hotel.hotel_id)
-processing_stats = await conversion_service.process_conversion_xml(xml_content)
+processing_stats = await conversion_service.process_conversion_xml(xml_content, run_full_guest_matching=True)
+
+await conversion_service.classify_regular_guests(24)

 _LOGGER.info(
     "Conversion processing complete for %s: %s", filename, processing_stats
File diff suppressed because it is too large
@@ -472,7 +472,7 @@ class CSVImporter:
     num_adults=num_adults,
     num_children=num_children,
     children_ages=children_ages,
-    hotel_code=final_hotel_code,
+    hotel_id=final_hotel_code,
     hotel_name=final_hotel_name,
     offer=str(row.get("room_offer", "")).strip() or None,
     user_comment=str(row.get("message", "")).strip() or None,

@@ -6,7 +6,7 @@ from pydantic import ValidationError
 from sqlalchemy import select
 from sqlalchemy.ext.asyncio import AsyncSession

-from .db import Customer, HashedCustomer
+from .db import Customer
 from .logging_config import get_logger
 from .schemas import CustomerData

@@ -53,13 +53,13 @@ class CustomerService:
 if "phone" in customer_data:
     customer.phone = customer_data["phone"]

-self.session.add(customer)
-await self.session.flush()  # Flush to get the customer.id
-
-# Create hashed version
-hashed_customer = customer.create_hashed_customer()
-hashed_customer.created_at = datetime.now(UTC)
-self.session.add(hashed_customer)
+# Set creation timestamp
+customer.created_at = datetime.now(UTC)
+
+# Update hashed fields
+customer.update_hashed_fields()
+
+self.session.add(customer)

 if auto_commit:
     await self.session.commit()
@@ -130,29 +130,8 @@ class CustomerService:
 if "phone" in update_data:
     customer.phone = update_data["phone"]

-# Update or create hashed version
-result = await self.session.execute(
-    select(HashedCustomer).where(HashedCustomer.customer_id == customer.id)
-)
-hashed_customer = result.scalar_one_or_none()
-
-if hashed_customer:
-    # Update existing hashed customer
-    new_hashed = customer.create_hashed_customer()
-    hashed_customer.hashed_email = new_hashed.hashed_email
-    hashed_customer.hashed_phone = new_hashed.hashed_phone
-    hashed_customer.hashed_given_name = new_hashed.hashed_given_name
-    hashed_customer.hashed_surname = new_hashed.hashed_surname
-    hashed_customer.hashed_city = new_hashed.hashed_city
-    hashed_customer.hashed_postal_code = new_hashed.hashed_postal_code
-    hashed_customer.hashed_country_code = new_hashed.hashed_country_code
-    hashed_customer.hashed_gender = new_hashed.hashed_gender
-    hashed_customer.hashed_birth_date = new_hashed.hashed_birth_date
-else:
-    # Create new hashed customer if it doesn't exist
-    hashed_customer = customer.create_hashed_customer()
-    hashed_customer.created_at = datetime.now(UTC)
-    self.session.add(hashed_customer)
+# Update hashed fields
+customer.update_hashed_fields()

 if auto_commit:
     await self.session.commit()

@@ -200,26 +179,27 @@ class CustomerService:
 # Create new customer (either no contact_id or customer doesn't exist)
 return await self.create_customer(customer_data, auto_commit=auto_commit)

-async def get_hashed_customer(self, customer_id: int) -> HashedCustomer | None:
+async def get_customer(self, customer_id: int) -> Customer | None:
     """Get the hashed version of a customer.

     Args:
         customer_id: The customer ID

     Returns:
-        HashedCustomer instance if found, None otherwise
+        Customer instance if found, None otherwise

     """
     result = await self.session.execute(
-        select(HashedCustomer).where(HashedCustomer.customer_id == customer_id)
+        select(Customer).where(Customer.id == customer_id)
     )
     return result.scalar_one_or_none()

 async def hash_existing_customers(self) -> int:
-    """Hash all existing customers that don't have a hashed version yet.
+    """Hash all existing customers that don't have hashed fields populated yet.

     This is useful for backfilling hashed data for customers created
-    before the hashing system was implemented.
+    before the hashing system was implemented, or after migrating from
+    the separate hashed_customers table.

     Also validates and sanitizes customer data (e.g., normalizes country
     codes to uppercase). Customers with invalid data that cannot be fixed
@@ -229,17 +209,16 @@ class CustomerService:
     Number of customers that were hashed

     """
-    # Get all customers
-    result = await self.session.execute(select(Customer))
+    # Get all customers without hashed data
+    result = await self.session.execute(
+        select(Customer).where(Customer.hashed_email.is_(None))
+    )
     customers = result.scalars().all()

     hashed_count = 0
     skipped_count = 0

     for customer in customers:
-        # Check if this customer already has a hashed version
-        existing_hashed = await self.get_hashed_customer(customer.id)
-        if not existing_hashed:
         # Validate and sanitize customer data before hashing
         customer_dict = {
             "given_name": customer.given_name,

@@ -271,10 +250,13 @@ class CustomerService:
 if hasattr(customer, key):
     setattr(customer, key, value)

-# Create hashed version with sanitized data
-hashed_customer = customer.create_hashed_customer()
-hashed_customer.created_at = datetime.now(UTC)
-self.session.add(hashed_customer)
+# Update hashed fields with sanitized data
+customer.update_hashed_fields()
+
+# Set created_at if not already set
+if not customer.created_at:
+    customer.created_at = datetime.now(UTC)

 hashed_count += 1

 except ValidationError as e:

@@ -27,7 +27,7 @@ from sqlalchemy.ext.asyncio import (
 async_sessionmaker,
 create_async_engine,
 )
-from sqlalchemy.orm import backref, declarative_base, relationship
+from sqlalchemy.orm import backref, declarative_base, foreign, relationship

 from .const import WebhookStatus
 from .logging_config import get_logger
@@ -311,6 +311,20 @@ class Customer(Base):
 language = Column(String)
 address_catalog = Column(Boolean)  # Added for XML
 name_title = Column(String)  # Added for XML
+
+# Hashed fields for Meta Conversion API (SHA256)
+hashed_email = Column(String(64))
+hashed_phone = Column(String(64))
+hashed_given_name = Column(String(64))
+hashed_surname = Column(String(64))
+hashed_city = Column(String(64))
+hashed_postal_code = Column(String(64))
+hashed_country_code = Column(String(64))
+hashed_gender = Column(String(64))
+hashed_birth_date = Column(String(64))
+
+created_at = Column(DateTime(timezone=True))

 reservations = relationship("Reservation", back_populates="customer")

 def __repr__(self):
@@ -335,53 +349,19 @@ class Customer(Base):
 # SHA256 hash
 return hashlib.sha256(normalized.encode("utf-8")).hexdigest()

-def create_hashed_customer(self):
-    """Create a HashedCustomer instance from this Customer."""
-    return HashedCustomer(
-        customer_id=self.id,
-        contact_id=self.contact_id,
-        hashed_email=self._normalize_and_hash(self.email_address),
-        hashed_phone=self._normalize_and_hash(self.phone),
-        hashed_given_name=self._normalize_and_hash(self.given_name),
-        hashed_surname=self._normalize_and_hash(self.surname),
-        hashed_city=self._normalize_and_hash(self.city_name),
-        hashed_postal_code=self._normalize_and_hash(self.postal_code),
-        hashed_country_code=self._normalize_and_hash(self.country_code),
-        hashed_gender=self._normalize_and_hash(self.gender),
-        hashed_birth_date=self._normalize_and_hash(self.birth_date),
-    )
-
-
-class HashedCustomer(Base):
-    """Hashed customer data for Meta Conversion API.
-
-    Stores SHA256 hashed versions of customer PII according to Meta's requirements.
-    This allows sending conversion events without exposing raw customer data.
-    """
-
-    __tablename__ = "hashed_customers"
-    id = Column(Integer, primary_key=True)
-    customer_id = Column(
-        Integer,
-        ForeignKey("customers.id", ondelete="SET NULL"),
-        unique=True,
-        nullable=True,
-    )
-    contact_id = Column(String, unique=True)  # Keep unhashed for reference
-    hashed_email = Column(String(64))  # SHA256 produces 64 hex chars
-    hashed_phone = Column(String(64))
-    hashed_given_name = Column(String(64))
-    hashed_surname = Column(String(64))
-    hashed_city = Column(String(64))
-    hashed_postal_code = Column(String(64))
-    hashed_country_code = Column(String(64))
-    hashed_gender = Column(String(64))
-    hashed_birth_date = Column(String(64))
-    created_at = Column(DateTime(timezone=True))
-
-    customer = relationship(
-        "Customer", backref=backref("hashed_version", uselist=False, lazy="joined")
-    )
+def update_hashed_fields(self):
+    """Update the hashed fields based on current plaintext values."""
+    self.hashed_email = self._normalize_and_hash(self.email_address)
+    self.hashed_phone = self._normalize_and_hash(self.phone)
+    self.hashed_given_name = self._normalize_and_hash(self.given_name)
+    self.hashed_surname = self._normalize_and_hash(self.surname)
+    self.hashed_city = self._normalize_and_hash(self.city_name)
+    self.hashed_postal_code = self._normalize_and_hash(self.postal_code)
+    self.hashed_country_code = self._normalize_and_hash(self.country_code)
+    self.hashed_gender = self._normalize_and_hash(self.gender)
+    self.hashed_birth_date = self._normalize_and_hash(self.birth_date)


 class ConversionGuest(Base):
@@ -420,23 +400,30 @@ class ConversionGuest(Base):
 hashed_country_code = Column(String(64))
 hashed_birth_date = Column(String(64))

-# Matched customer reference (nullable, filled after matching)
-hashed_customer_id = Column(
-    Integer, ForeignKey("hashed_customers.id"), nullable=True, index=True
-)
-
 # Guest classification
 is_regular = Column(
     Boolean, default=False
 )  # True if guest has many prior stays before appearing in our reservations
+# Guest classification
+is_awareness_guest = Column(
+    Boolean, default=False
+)  # True if guests first stay was from our campaigns

 # Metadata
 first_seen = Column(DateTime(timezone=True))
 last_seen = Column(DateTime(timezone=True))

 # Relationships
-conversions = relationship("Conversion", back_populates="guest")
-hashed_customer = relationship("HashedCustomer", backref="conversion_guests")
+conversions = relationship(
+    "Conversion",
+    back_populates="guest",
+    foreign_keys="[Conversion.hotel_id, Conversion.guest_id]",
+    primaryjoin="and_(ConversionGuest.hotel_id == foreign(Conversion.hotel_id), "
+    "ConversionGuest.guest_id == foreign(Conversion.guest_id))",
+)

 @staticmethod
 def _normalize_and_hash(value):
@@ -517,9 +504,6 @@ class Reservation(Base):
 __tablename__ = "reservations"
 id = Column(Integer, primary_key=True)
 customer_id = Column(Integer, ForeignKey("customers.id", ondelete="SET NULL"))
-hashed_customer_id = Column(
-    Integer, ForeignKey("hashed_customers.id", ondelete="CASCADE")
-)
 unique_id = Column(String, unique=True)
 md5_unique_id = Column(String(32), unique=True)  # max length 32 guaranteed
 start_date = Column(Date)

@@ -541,15 +525,14 @@ class Reservation(Base):
 # Advertising account IDs (stored conditionally based on fbclid/gclid presence)
 meta_account_id = Column(String)
 google_account_id = Column(String)
-# Add hotel_code and hotel_name for XML
-hotel_code = Column(String)
+# Add hotel_id and hotel_name for XML
+hotel_id = Column(String, ForeignKey("hotels.hotel_id", ondelete="CASCADE"))
 hotel_name = Column(String)
 # RoomTypes fields (optional)
 room_type_code = Column(String)
 room_classification_code = Column(String)
 room_type = Column(String)
 customer = relationship("Customer", back_populates="reservations")
-hashed_customer = relationship("HashedCustomer", backref="reservations")


 # Table for tracking acknowledged requests by client

@@ -569,7 +552,7 @@ class AckedRequest(Base):
 )  # Username of the client making the request
 unique_id = Column(
     String, index=True
-)  # Should match Reservation.form_id or another unique field
+)  # Matches the md5_unique_id in Reservation
 timestamp = Column(DateTime(timezone=True))
@@ -601,9 +584,6 @@ class Conversion(Base):
     Integer, ForeignKey("reservations.id"), nullable=True, index=True
 )
 customer_id = Column(Integer, ForeignKey("customers.id"), nullable=True, index=True)
-hashed_customer_id = Column(
-    Integer, ForeignKey("hashed_customers.id"), nullable=True, index=True
-)

 # Reservation metadata from XML
 hotel_id = Column(

@@ -646,23 +626,28 @@ class Conversion(Base):
 created_at = Column(DateTime(timezone=True))  # When this record was imported
 updated_at = Column(DateTime(timezone=True))  # When this record was last updated

-# Composite foreign key constraint for ConversionGuest (hotel_id, guest_id)
+# Table constraints
 __table_args__ = (
+    UniqueConstraint(
+        "hotel_id", "pms_reservation_id", name="uq_conversion_hotel_reservation"
+    ),
     ForeignKeyConstraint(
         ["hotel_id", "guest_id"],
         ["conversion_guests.hotel_id", "conversion_guests.guest_id"],
-        ondelete="SET NULL",
-    ),
-    UniqueConstraint(
-        "hotel_id", "pms_reservation_id", name="uq_conversion_hotel_reservation"
+        name="fk_conversions_guest",
     ),
 )

 # Relationships
 reservation = relationship("Reservation", backref="conversions")
 customer = relationship("Customer", backref="conversions")
-hashed_customer = relationship("HashedCustomer", backref="conversions")
-guest = relationship("ConversionGuest", back_populates="conversions")
+guest = relationship(
+    "ConversionGuest",
+    back_populates="conversions",
+    foreign_keys="[Conversion.hotel_id, Conversion.guest_id]",
+    primaryjoin="and_(Conversion.hotel_id == ConversionGuest.hotel_id, "
+    "Conversion.guest_id == ConversionGuest.guest_id)",
+)
 conversion_rooms = relationship(
     "ConversionRoom", back_populates="conversion", cascade="all, delete-orphan"
 )
@@ -115,7 +115,7 @@ async def backfill_advertising_account_ids(
 sql = text(
     "UPDATE reservations "
     "SET meta_account_id = :meta_account "
-    "WHERE hotel_code = :hotel_id "
+    "WHERE hotel_id = :hotel_id "
    "AND fbclid IS NOT NULL "
    "AND fbclid != '' "
    "AND (meta_account_id IS NULL OR meta_account_id = '')"

@@ -141,7 +141,7 @@ async def backfill_advertising_account_ids(
 sql = text(
     "UPDATE reservations "
     "SET google_account_id = :google_account "
-    "WHERE hotel_code = :hotel_id "
+    "WHERE hotel_id = :hotel_id "
    "AND gclid IS NOT NULL "
    "AND gclid != '' "
    "AND (google_account_id IS NULL OR google_account_id = '')"

@@ -215,7 +215,7 @@ async def backfill_acked_requests_username(
 UPDATE acked_requests
 SET username = :username
 WHERE unique_id IN (
-    SELECT md5_unique_id FROM reservations WHERE hotel_code = :hotel_id
+    SELECT md5_unique_id FROM reservations WHERE hotel_id = :hotel_id
 )
 AND username IS NULL
 """
@@ -523,10 +523,10 @@ class ReservationStatsCollector:
 async with self.async_sessionmaker() as session:
     # Query reservations created in the reporting period
     result = await session.execute(
-        select(Reservation.hotel_code, func.count(Reservation.id))
+        select(Reservation.hotel_id, func.count(Reservation.id))
         .where(Reservation.created_at >= period_start)
         .where(Reservation.created_at < period_end)
-        .group_by(Reservation.hotel_code)
+        .group_by(Reservation.hotel_id)
     )

     hotel_counts = dict(result.all())

@@ -7,7 +7,7 @@ from typing import Optional
 from sqlalchemy import and_, select
 from sqlalchemy.ext.asyncio import AsyncSession

-from .db import AckedRequest, Customer, HashedCustomer, Reservation
+from .db import AckedRequest, Customer, Reservation
 from .schemas import ReservationData
@@ -64,17 +64,6 @@ class ReservationService:
     reservation_data, customer_id
 )

-# Automatically populate hashed_customer_id from the customer
-# Since hashed_customer is always created when a customer is created,
-# we can get it by querying for the hashed_customer with matching customer_id
-hashed_customer_result = await self.session.execute(
-    select(HashedCustomer).where(
-        HashedCustomer.customer_id == customer_id
-    )
-)
-hashed_customer = hashed_customer_result.scalar_one_or_none()
-if hashed_customer:
-    reservation.hashed_customer_id = hashed_customer.id
-
 self.session.add(reservation)

@@ -181,7 +170,7 @@ class ReservationService:
 if end_date:
     filters.append(Reservation.created_at <= end_date)
 if hotel_code:
-    filters.append(Reservation.hotel_code == hotel_code)
+    filters.append(Reservation.hotel_id == hotel_code)

 if filters:
     query = query.where(and_(*filters))
@@ -131,7 +131,7 @@ class ReservationData(BaseModel):
 num_adults: int = Field(..., ge=1)
 num_children: int = Field(0, ge=0, le=10)
 children_ages: list[int] = Field(default_factory=list)
-hotel_code: str = Field(..., min_length=1, max_length=50)
+hotel_id: str = Field(..., min_length=1, max_length=50)
 hotel_name: str | None = Field(None, max_length=200)
 offer: str | None = Field(None, max_length=500)
 user_comment: str | None = Field(None, max_length=2000)

@@ -562,7 +562,6 @@ class ConversionData(BaseModel):
 # Foreign key references (nullable - matched after creation)
 reservation_id: int | None = Field(None, gt=0)
 customer_id: int | None = Field(None, gt=0)
-hashed_customer_id: int | None = Field(None, gt=0)

 # Required reservation metadata from PMS
 hotel_id: str = Field(..., min_length=1, max_length=50)

@@ -591,7 +590,7 @@ class ConversionData(BaseModel):

 @field_validator(
     "pms_reservation_id", "guest_id", "reservation_id", "customer_id",
-    "hashed_customer_id", mode="before"
+    mode="before"
 )
 @classmethod
 def convert_int_fields(cls, v: Any) -> int | None:
@@ -51,7 +51,6 @@ from alpine_bits_python.db import (
 AckedRequest,
 Base,
 Customer,
-HashedCustomer,
 Reservation,
 get_database_url,
 )

@@ -306,7 +305,7 @@ async def migrate_data(
 user_comment=reservation.user_comment,
 fbclid=reservation.fbclid,
 gclid=reservation.gclid,
-hotel_code=reservation.hotel_code,
+hotel_code=reservation.hotel_id,
 hotel_name=reservation.hotel_name,
 room_type_code=reservation.room_type_code,
 room_classification_code=reservation.room_classification_code,
@@ -203,7 +203,7 @@ async def process_wix_form_submission(
 "name_title": None,
 }

-# This automatically creates/updates both Customer and HashedCustomer
+# This automatically creates/updates Customer
 db_customer = await customer_service.get_or_create_customer(customer_data)

 # Determine hotel_code and hotel_name

@@ -247,7 +247,7 @@ async def process_wix_form_submission(
 num_adults=num_adults,
 num_children=num_children,
 children_ages=children_ages,
-hotel_code=hotel_code,
+hotel_id=hotel_code,
 hotel_name=hotel_name,
 offer=offer,
 created_at=submissionTime,

@@ -575,7 +575,7 @@ async def process_generic_webhook_submission(
 "num_adults": num_adults,
 "num_children": num_children,
 "children_ages": children_ages,
-"hotel_code": hotel_code,
+"hotel_id": hotel_code,
 "hotel_name": hotel_name,
 "offer": selected_offers_str,
 "utm_source": utm_source,
@@ -59,7 +59,7 @@ async def load_test_data_from_db():
 result = []
 for reservation, customer in reservations_with_customers:
     # Get hashed customer data
-    hashed_customer = await customer_service.get_hashed_customer(customer.id)
+    hashed_customer = await customer_service.get_customer(customer.id)

     result.append(
         {

@@ -98,7 +98,7 @@ def sample_reservation(sample_customer):
 user_comment="Late check-in requested",
 fbclid="PAZXh0bgNhZW0BMABhZGlkAasmYBTNE3QBp1jWuJ9zIpfEGRJMP63fMAMI405yvG5EtH-OT0PxSkAbBJaudFHR6cMtkdHu_aem_fopaFtECyVPNW9fmWfEkyA",
 gclid="",
-hotel_code="HOTEL123",
+hotel_id="HOTEL123",
 hotel_name="Alpine Paradise Resort",
 )
 data = reservation.model_dump(exclude_none=True)

@@ -136,7 +136,7 @@ def minimal_reservation(minimal_customer):
 num_adults=1,
 num_children=0,
 children_ages=[],
-hotel_code="HOTEL123",
+hotel_id="HOTEL123",
 created_at=datetime(2024, 12, 2, 12, 0, 0, tzinfo=UTC),
 hotel_name="Alpine Paradise Resort",
 )
@@ -403,7 +403,7 @@ class TestEdgeCases:
 num_adults=1,
 num_children=0,
 children_ages="",
-hotel_code="HOTEL123",
+hotel_id="HOTEL123",
 created_at=datetime.now(UTC),
 )

@@ -434,7 +434,7 @@ class TestEdgeCases:
 num_adults=2,
 num_children=0,
 children_ages=[],
-hotel_code="HOTEL123",
+hotel_id="HOTEL123",
 created_at=datetime.now(UTC),
 utm_source="facebook",
 utm_medium="social",

@@ -851,7 +851,7 @@ class TestAcknowledgments:
 num_adults=2,
 num_children=0,
 children_ages=[],
-hotel_code="HOTEL123",
+hotel_id="HOTEL123",
 hotel_name="Alpine Paradise Resort",
 created_at=datetime(2024, 11, 1, 12, 0, 0, tzinfo=UTC),
 )
@@ -863,7 +863,7 @@ class TestAcknowledgments:
 num_adults=2,
 num_children=1,
 children_ages=[10],
-hotel_code="HOTEL123",
+hotel_id="HOTEL123",
 hotel_name="Alpine Paradise Resort",
 created_at=datetime(2024, 11, 15, 10, 0, 0, tzinfo=UTC),
 )

@@ -523,7 +523,7 @@ class TestGenericWebhookEndpoint:
 (r for r in reservations if r.customer_id == customer.id), None
 )
 assert reservation is not None, "Reservation should be created"
-assert reservation.hotel_code == "HOTEL123"
+assert reservation.hotel_id == "HOTEL123"
 assert reservation.hotel_name == "Test Hotel"
 assert reservation.num_adults == 2
 assert reservation.num_children == 1

@@ -614,7 +614,7 @@ class TestGenericWebhookEndpoint:
 result = await session.execute(select(Reservation))
 reservations = result.scalars().all()
 reservation = next(
-    (r for r in reservations if r.hotel_code == "HOTEL123"), None
+    (r for r in reservations if r.hotel_id == "HOTEL123"), None
 )
 assert reservation is not None, "Reservation should be created"
 assert reservation.num_children == 3
@@ -20,6 +20,7 @@ import pytest
 import pytest_asyncio
 from sqlalchemy import select
 from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
+from sqlalchemy.orm import selectinload

 from alpine_bits_python.conversion_service import ConversionService
 from alpine_bits_python.csv_import import CSVImporter

@@ -142,7 +143,7 @@ class TestConversionServiceWithImportedData:

 ## Need to check if reservations and customers are now actually available in the db before proceeding

-conversion_service = ConversionService(test_db_session)
+conversion_service = ConversionService(test_db_session, hotel_id="39054_001")
 stats = await conversion_service.process_conversion_xml(xml_content)

 # BASELINE ASSERTIONS:
@@ -224,7 +225,7 @@ class TestConversionServiceWithImportedData:
 # File already has proper XML structure, just use it as-is
 xml_content = xml_content.strip()

-conversion_service = ConversionService(test_db_session)
+conversion_service = ConversionService(test_db_session, hotel_id="39054_001")
 stats = await conversion_service.process_conversion_xml(xml_content)

 # Verify conversions were created

@@ -300,7 +301,7 @@ class TestConversionServiceWithImportedData:
 # File already has proper XML structure, just use it as-is
 xml_content = xml_content.strip()

-conversion_service = ConversionService(test_db_session)
+conversion_service = ConversionService(test_db_session, hotel_id="39054_001")
 stats = await conversion_service.process_conversion_xml(xml_content)

 # Verify conversions were processed

@@ -332,7 +333,7 @@ class TestConversionServiceWithImportedData:
 """Test ConversionService handles invalid XML gracefully."""
 invalid_xml = "<invalid>unclosed tag"

-conversion_service = ConversionService(test_db_session)
+conversion_service = ConversionService(test_db_session, hotel_id="39054_001")

 with pytest.raises(ValueError, match="Invalid XML"):
     await conversion_service.process_conversion_xml(invalid_xml)

@@ -342,7 +343,7 @@ class TestConversionServiceWithImportedData:
 """Test ConversionService handles empty/minimal XML."""
 minimal_xml = '<?xml version="1.0"?><root></root>'

-conversion_service = ConversionService(test_db_session)
+conversion_service = ConversionService(test_db_session, hotel_id="39054_001")
 stats = await conversion_service.process_conversion_xml(minimal_xml)

 assert stats["total_reservations"] == 0
@@ -421,7 +422,7 @@ class TestConversionServiceWithImportedData:
 xml_content1 = multi_builder1.build_xml()

 # Process first batch
-service = ConversionService(test_db_session)
+service = ConversionService(test_db_session, hotel_id="39054_001")
 stats1 = await service.process_conversion_xml(xml_content1)

 assert stats1["total_reservations"] == 2
@@ -542,6 +543,187 @@ class TestConversionServiceWithImportedData:
        )


+class TestConversionUpdatesAndMatching:
+    """Tests covering conversion updates and core matching logic."""
+
+    @pytest.mark.asyncio
+    async def test_reprocessing_conversion_updates_metadata(self, test_db_session):
+        """Ensure reprocessing a reservation updates metadata instead of duplicating."""
+        def build_xml(
+            *,
+            booking_channel: str,
+            advertising_medium: str,
+            advertising_partner: str,
+            room_number: str,
+            arrival: str,
+            departure: str,
+            revenue: float,
+        ) -> str:
+            return f"""<?xml version="1.0"?>
+            <root>
+              <reservation id="2001" hotelID="39054_001" number="A-1" date="2025-01-05"
+                           bookingChannel="{booking_channel}"
+                           advertisingMedium="{advertising_medium}"
+                           advertisingPartner="{advertising_partner}"
+                           advertisingCampagne="abc123">
+                <guest id="900" firstName="Casey" lastName="Jordan" email="casey@example.com"/>
+                <roomReservations>
+                  <roomReservation roomNumber="{room_number}" arrival="{arrival}" departure="{departure}" status="reserved">
+                    <dailySales>
+                      <dailySale date="{arrival}" revenueTotal="{revenue}"/>
+                      <dailySale date="{departure}" revenueTotal="{revenue}"/>
+                    </dailySales>
+                  </roomReservation>
+                </roomReservations>
+              </reservation>
+            </root>"""
+
+        first_xml = build_xml(
+            booking_channel="OTA",
+            advertising_medium="META",
+            advertising_partner="cpc",
+            room_number="33",
+            arrival="2025-02-01",
+            departure="2025-02-03",
+            revenue=120.0,
+        )
+
+        service = ConversionService(test_db_session, hotel_id="39054_001")
+        stats_first = await service.process_conversion_xml(first_xml)
+        assert stats_first["total_reservations"] == 1
+
+        result = await test_db_session.execute(
+            select(Conversion)
+            .where(
+                Conversion.hotel_id == "39054_001",
+                Conversion.pms_reservation_id == 2001,
+            )
+            .options(selectinload(Conversion.conversion_rooms))
+        )
+        conversion = result.scalar_one()
+        assert conversion.booking_channel == "OTA"
+        assert conversion.advertising_partner == "cpc"
+        original_room_count = len(conversion.conversion_rooms)
+        assert original_room_count == 1
+        assert conversion.conversion_rooms[0].room_number == "33"
+
+        updated_xml = build_xml(
+            booking_channel="DIRECT",
+            advertising_medium="WEBSITE",
+            advertising_partner="organic",
+            room_number="44",
+            arrival="2025-02-02",
+            departure="2025-02-04",
+            revenue=150.0,
+        )
+
+        stats_second = await service.process_conversion_xml(updated_xml)
+        assert stats_second["total_reservations"] == 1
+
+        test_db_session.expire_all()
+        result = await test_db_session.execute(
+            select(Conversion)
+            .where(
+                Conversion.hotel_id == "39054_001",
+                Conversion.pms_reservation_id == 2001,
+            )
+            .options(selectinload(Conversion.conversion_rooms))
+        )
+        updated_conversion = result.scalar_one()
+        assert updated_conversion.booking_channel == "DIRECT"
+        assert updated_conversion.advertising_medium == "WEBSITE"
+        assert updated_conversion.advertising_partner == "organic"
+        assert len(updated_conversion.conversion_rooms) == 1
+        assert updated_conversion.conversion_rooms[0].room_number == "44"
+        assert updated_conversion.conversion_rooms[0].arrival_date.strftime(
+            "%Y-%m-%d"
+        ) == "2025-02-02"
+
+    @pytest.mark.asyncio
+    async def test_advertising_match_uses_hashed_email_for_disambiguation(
+        self, test_db_session
+    ):
+        """Ensure hashed email filters ambiguous advertising matches."""
+        # Create two customers/reservations sharing the same click-id prefix
+        customer_a = Customer(
+            given_name="Lara",
+            surname="North",
+            email_address="lara@example.com",
+            contact_id="contact_a",
+        )
+        customer_a.update_hashed_fields()
+        customer_b = Customer(
+            given_name="Mia",
+            surname="West",
+            email_address="mia@example.com",
+            contact_id="contact_b",
+        )
+        customer_b.update_hashed_fields()
+
+        test_db_session.add_all([customer_a, customer_b])
+        await test_db_session.flush()
+
+        reservation_a = Reservation(
+            customer_id=customer_a.id,
+            unique_id="res_a",
+            md5_unique_id="A" * 32,
+            hotel_id="39054_001",
+            fbclid="click-prefix-111",
+        )
+        reservation_b = Reservation(
+            customer_id=customer_b.id,
+            unique_id="res_b",
+            md5_unique_id="B" * 32,
+            hotel_id="39054_001",
+            fbclid="click-prefix-222",
+        )
+        test_db_session.add_all([reservation_a, reservation_b])
+        await test_db_session.commit()
+
+        from tests.helpers import ReservationXMLBuilder
+
+        xml_content = (
+            ReservationXMLBuilder(
+                hotel_id="39054_001",
+                reservation_id="3001",
+                reservation_number="B-1",
+                reservation_date="2025-03-10",
+                advertising_campagne="click-prefix",
+            )
+            .set_guest(
+                guest_id="701",
+                first_name="Mia",
+                last_name="West",
+                email="mia@example.com",
+            )
+            .add_room(
+                arrival="2025-04-01",
+                departure="2025-04-03",
+                room_number="55",
+                status="reserved",
+                revenue_logis_per_day=180.0,
+            )
+            .build_xml()
+        )
+
+        service = ConversionService(test_db_session, hotel_id="39054_001")
+        stats = await service.process_conversion_xml(xml_content)
+
+        result = await test_db_session.execute(
+            select(Conversion)
+            .where(
+                Conversion.hotel_id == "39054_001",
+                Conversion.pms_reservation_id == 3001,
+            )
+            .options(selectinload(Conversion.guest))
+        )
+        conversion = result.scalar_one()
+        assert conversion.reservation_id == reservation_b.id
+        assert conversion.customer_id == customer_b.id
+        assert stats["matched_to_reservation"] == 1
+        assert stats["matched_to_customer"] == 0
+
+
 class TestXMLBuilderUsage:
     """Demonstrate usage of XML builder helpers for creating test data."""

@@ -577,7 +759,7 @@ class TestXMLBuilderUsage:
        )

        # Process the XML
-        service = ConversionService(test_db_session)
+        service = ConversionService(test_db_session, hotel_id="39054_001")
        stats = await service.process_conversion_xml(xml_content)

        assert stats["total_reservations"] == 1
@@ -616,7 +798,7 @@ class TestXMLBuilderUsage:
            .build_xml()
        )

-        service = ConversionService(test_db_session)
+        service = ConversionService(test_db_session, hotel_id="39054_001")
        stats = await service.process_conversion_xml(xml_content)

        assert stats["total_reservations"] == 1
@@ -677,7 +859,7 @@ class TestXMLBuilderUsage:
        xml_content = multi_builder.build_xml()

        # Process the XML
-        service = ConversionService(test_db_session)
+        service = ConversionService(test_db_session, hotel_id="39054_001")
        stats = await service.process_conversion_xml(xml_content)

        assert stats["total_reservations"] == 2
@@ -740,14 +922,13 @@ class TestHashedMatchingLogic:
        test_db_session.add(customer)
        await test_db_session.flush()

-        hashed_customer = customer.create_hashed_customer()
-        test_db_session.add(hashed_customer)
-        await test_db_session.flush()
+        customer.update_hashed_fields()

        reservation = Reservation(
            customer_id=customer.id,
            unique_id="res_6",
-            hotel_code="hotel_1",
+            hotel_id="hotel_1",
        )
        test_db_session.add(reservation)
        await test_db_session.commit()
@@ -769,7 +950,7 @@ class TestHashedMatchingLogic:
        </reservation>
        </root>"""

-        service = ConversionService(test_db_session, hotel_id="hotel_1")
+        service = ConversionService(test_db_session, hotel_id="39054_001")
        stats = await service.process_conversion_xml(xml_content)

        # Verify conversion was created
@@ -800,17 +981,152 @@ class TestHashedMatchingLogic:
        assert conversion_with_guest.guest.guest_last_name == "Miller"
        assert conversion_with_guest.guest.guest_email == "david@example.com"

-        # Verify conversion_room was created
-        room_result = await test_db_session.execute(
-            select(ConversionRoom).where(ConversionRoom.conversion_id == conversion.id)
-        )
-        rooms = room_result.scalars().all()
-        assert len(rooms) > 0, "ConversionRoom should be created"
-
-        # Verify matching occurred (may or may not have matched depending on data)
-        # The important thing is that the records exist
-        assert stats["total_reservations"] == 1
-        assert stats["total_daily_sales"] == 1
+class TestRegularGuestClassification:
+    """Tests for the classify_regular_guests helper."""
+
+    @pytest.mark.asyncio
+    async def test_classify_regular_guest_with_unattributable_history(
+        self, test_db_session
+    ):
+        """Guests with unattributable paying stays become regulars."""
+        from tests.helpers import MultiReservationXMLBuilder, ReservationXMLBuilder
+
+        multi = MultiReservationXMLBuilder()
+        base_builder = ReservationXMLBuilder(
+            hotel_id="39054_001",
+            reservation_id="4001",
+            reservation_number="REG-1",
+            reservation_date="2025-05-01",
+        ).set_guest(
+            guest_id="888",
+            first_name="Regular",
+            last_name="Guest",
+            email="regular@example.com",
+        )
+        base_builder.add_room(
+            arrival="2025-06-01",
+            departure="2025-06-03",
+            room_number="71",
+            status="departed",
+            revenue_logis_per_day=220.0,
+        )
+        multi.add_reservation(base_builder)
+
+        second = ReservationXMLBuilder(
+            hotel_id="39054_001",
+            reservation_id="4002",
+            reservation_number="REG-2",
+            reservation_date="2025-05-10",
+        ).set_guest(
+            guest_id="888",
+            first_name="Regular",
+            last_name="Guest",
+            email="regular@example.com",
+        )
+        second.add_room(
+            arrival="2025-07-01",
+            departure="2025-07-04",
+            room_number="72",
+            status="departed",
+            revenue_logis_per_day=210.0,
+        )
+        multi.add_reservation(second)
+
+        service = ConversionService(test_db_session, hotel_id="39054_001")
+        await service.process_conversion_xml(multi.build_xml())
+
+        stats = await service.classify_regular_guests(updated_within_hours=None)
+        assert stats["regular"] == 1
+
+        guest = await test_db_session.execute(
+            select(ConversionGuest).where(
+                ConversionGuest.hotel_id == "39054_001",
+                ConversionGuest.guest_id == 888,
+            )
+        )
+        guest_record = guest.scalar_one()
+        assert guest_record.is_regular is True
+        assert guest_record.is_awareness_guest is False
+
+    @pytest.mark.asyncio
+    async def test_classify_awareness_guest_when_first_stay_attributable(
+        self, test_db_session
+    ):
+        """If the earliest paying stay is attributable, mark awareness guests."""
+        from tests.helpers import MultiReservationXMLBuilder, ReservationXMLBuilder
+
+        multi = MultiReservationXMLBuilder()
+        first = ReservationXMLBuilder(
+            hotel_id="39054_001",
+            reservation_id="4101",
+            reservation_number="AW-1",
+            reservation_date="2025-08-01",
+        ).set_guest(
+            guest_id="889",
+            first_name="Aware",
+            last_name="Guest",
+            email="aware@example.com",
+        )
+        first.add_room(
+            arrival="2025-09-01",
+            departure="2025-09-03",
+            room_number="81",
+            status="departed",
+            revenue_logis_per_day=250.0,
+        )
+        multi.add_reservation(first)
+
+        second = ReservationXMLBuilder(
+            hotel_id="39054_001",
+            reservation_id="4102",
+            reservation_number="AW-2",
+            reservation_date="2025-08-10",
+        ).set_guest(
+            guest_id="889",
+            first_name="Aware",
+            last_name="Guest",
+            email="aware@example.com",
+        )
+        second.add_room(
+            arrival="2025-10-05",
+            departure="2025-10-08",
+            room_number="82",
+            status="departed",
+            revenue_logis_per_day=260.0,
+        )
+        multi.add_reservation(second)
+
+        service = ConversionService(test_db_session, hotel_id="39054_001")
+        await service.process_conversion_xml(multi.build_xml())
+
+        # Mark earliest stay as attributable to simulate campaign match
+        result = await test_db_session.execute(
+            select(Conversion)
+            .where(
+                Conversion.hotel_id == "39054_001",
+                Conversion.guest_id == 889,
+            )
+            .order_by(Conversion.reservation_date.asc())
+        )
+        conversions = result.scalars().all()
+        conversions[0].directly_attributable = True
+        conversions[1].directly_attributable = False
+        await test_db_session.commit()
+
+        stats = await service.classify_regular_guests(updated_within_hours=None)
+        assert stats["regular"] == 1
+        assert stats["awareness"] == 1
+
+        guest = await test_db_session.execute(
+            select(ConversionGuest).where(
+                ConversionGuest.hotel_id == "39054_001",
+                ConversionGuest.guest_id == 889,
+            )
+        )
+        guest_record = guest.scalar_one()
+        assert guest_record.is_regular is True
+        assert guest_record.is_awareness_guest is True

    @pytest.mark.asyncio
    async def test_conversion_guest_composite_key_prevents_duplicates(
@@ -6,7 +6,7 @@ from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine

from alpine_bits_python.customer_service import CustomerService
-from alpine_bits_python.db import Base, Customer, HashedCustomer
+from alpine_bits_python.db import Base, Customer


@pytest_asyncio.fixture
@@ -42,9 +42,9 @@ async def test_create_customer_creates_hashed_version(async_session: AsyncSessio
    assert customer.given_name == "John"

    # Check that hashed version was created
-    hashed = await service.get_hashed_customer(customer.id)
+    hashed = await service.get_customer(customer.id)
    assert hashed is not None
-    assert hashed.customer_id == customer.id
+    assert hashed.id == customer.id
    assert hashed.hashed_email is not None
    assert hashed.hashed_phone is not None
    assert hashed.hashed_given_name is not None
@@ -66,7 +66,7 @@ async def test_update_customer_updates_hashed_version(async_session: AsyncSessio
    customer = await service.create_customer(customer_data)

    # Get initial hashed email
-    hashed = await service.get_hashed_customer(customer.id)
+    hashed = await service.get_customer(customer.id)
    original_hashed_email = hashed.hashed_email

    # Update customer email
@@ -74,7 +74,7 @@ async def test_update_customer_updates_hashed_version(async_session: AsyncSessio
    updated_customer = await service.update_customer(customer, update_data)

    # Check that hashed version was updated
-    updated_hashed = await service.get_hashed_customer(updated_customer.id)
+    updated_hashed = await service.get_customer(updated_customer.id)
    assert updated_hashed.hashed_email != original_hashed_email

@@ -95,7 +95,7 @@ async def test_get_or_create_customer_creates_new(async_session: AsyncSession):
    assert customer.contact_id == "new123"

    # Verify hashed version exists
-    hashed = await service.get_hashed_customer(customer.id)
+    hashed = await service.get_customer(customer.id)
    assert hashed is not None

@@ -145,10 +145,13 @@ async def test_hash_existing_customers_backfills(async_session: AsyncSession):

    # Verify no hashed version exists
    result = await async_session.execute(
-        select(HashedCustomer).where(HashedCustomer.customer_id == customer.id)
+        select(Customer).where(Customer.id == customer.id)
    )
    hashed = result.scalar_one_or_none()
-    assert hashed is None
+    assert hashed, "Customer should exist."
+
+    assert hashed.hashed_given_name is None, "Hashed given name should be None."
+    assert hashed.hashed_email is None, "Hashed email should be None."

    # Run backfill
    service = CustomerService(async_session)
@@ -158,11 +161,12 @@ async def test_hash_existing_customers_backfills(async_session: AsyncSession):

    # Verify hashed version now exists
    result = await async_session.execute(
-        select(HashedCustomer).where(HashedCustomer.customer_id == customer.id)
+        select(Customer).where(Customer.id == customer.id)
    )
    hashed = result.scalar_one_or_none()
-    assert hashed is not None
-    assert hashed.hashed_email is not None
+    assert hashed is not None, "Customer should still exist after backfill."
+    assert hashed.hashed_email is not None, "Hashed email should be populated."
+    assert hashed.hashed_given_name is not None, "Hashed given name should be populated."


@pytest.mark.asyncio
@@ -201,7 +205,7 @@ async def test_hashing_normalization(async_session: AsyncSession):
    }
    customer = await service.create_customer(customer_data)

-    hashed = await service.get_hashed_customer(customer.id)
+    hashed = await service.get_customer(customer.id)

    # Verify hashes exist (normalization should have occurred)
    assert hashed.hashed_email is not None
@@ -244,13 +248,17 @@ async def test_hash_existing_customers_normalizes_country_code(

    # Verify no hashed version exists yet
    result = await async_session.execute(
-        select(HashedCustomer).where(HashedCustomer.customer_id == customer.id)
+        select(Customer).where(Customer.id == customer.id)
    )
    hashed = result.scalar_one_or_none()
-    assert hashed is None
+    assert hashed is not None, "Customer should exist."
+
+    assert hashed.hashed_given_name is None, "Hashed given name should be None."
+    assert hashed.hashed_email is None, "Hashed email should be None."
+    assert hashed.hashed_country_code is None, "Hashed country code should be None."

    # Verify the customer has the invalid country code stored in the DB
-    assert customer.country_code == "Italy"
+    assert hashed.country_code == "Italy"

    # Run hash_existing_customers - this should normalize "Italy" to "IT"
    # during validation and successfully create a hashed customer
@@ -263,7 +271,7 @@ async def test_hash_existing_customers_normalizes_country_code(
    # Verify hashed version was created
    await async_session.refresh(customer)
    result = await async_session.execute(
-        select(HashedCustomer).where(HashedCustomer.customer_id == customer.id)
+        select(Customer).where(Customer.id == customer.id)
    )
    hashed = result.scalar_one_or_none()
    assert hashed is not None
@@ -302,7 +310,7 @@ async def test_hash_existing_customers_normalizes_country_code(

    # Verify hashed version was created with correct hash
    result = await async_session.execute(
-        select(HashedCustomer).where(HashedCustomer.customer_id == customer2.id)
+        select(Customer).where(Customer.id == customer2.id)
    )
    hashed = result.scalar_one_or_none()
    assert hashed is not None