44 Commits
1.1.0 ... main

Author SHA1 Message Date
Jonas Linter
fce2dbc8de Fixed incorrect overlap detection 2025-12-09 15:29:35 +01:00
f6929ca7cc Small logging improvement 2025-12-09 14:13:58 +00:00
Jonas Linter
c73747e02d Update free_rooms is_closing season detection. Should also accept 1 as True 2025-12-09 14:45:22 +01:00
Jonas Linter
13e404d07c Only update updated_at timestamps if something actually changes. 2025-12-09 14:06:00 +01:00
Jonas Linter
c4bb9c524d Disabled free_rooms for main branch
The respective tests fail but that is too be expected
2025-12-09 12:36:08 +01:00
Jonas Linter
02b0ec3b9c Updated handshake reference file 2025-12-04 20:48:50 +01:00
Jonas Linter
2f458ed6df Added git-filter-repo 2025-12-04 20:32:21 +01:00
Jonas Linter
4b8bb1b57d No schema creation if public 2025-12-04 17:07:54 +01:00
Jonas Linter
4cff7c6081 Fixed setup for db where its empty 2025-12-04 17:05:48 +01:00
Jonas Linter
a6837197b6 FIxed date range overlap 2025-12-04 16:33:11 +01:00
Jonas Linter
16d12f5b62 Free rooms doesn't cause errors but further data verification is necessary 2025-12-04 16:14:40 +01:00
Jonas Linter
ea3d886b87 Activated free rooms 2025-12-04 15:32:29 +01:00
Jonas Linter
f728ce369a Merge branch 'db_fixes_plus_free_rooms' of https://gitea.99tales.net/jonas/alpinebits_python into db_fixes_plus_free_rooms 2025-12-03 22:37:08 +01:00
Jonas Linter
6cee77c232 Bit hamfisted but updates old records 2025-12-03 19:15:21 +01:00
Jonas Linter
6ea510174e Updated classification scheme 2025-12-03 19:02:34 +01:00
Jonas Linter
67c20bc18a Another logging message to see how the classification is doing 2025-12-03 18:52:53 +01:00
Jonas Linter
ff338ecb15 Added a logging statement to better see where the child dies 2025-12-03 18:44:32 +01:00
Jonas Linter
d61897b929 Added is_regular and awarness detection 2025-12-03 17:59:30 +01:00
Jonas Linter
95da5dcee9 And fixed typo 2025-12-03 17:47:30 +01:00
Jonas Linter
b4a6c1656d Added migration for awarness guest column 2025-12-03 17:47:23 +01:00
Jonas Linter
89f5b81983 Awarness guest now for real 2025-12-03 17:44:05 +01:00
Jonas Linter
8e5f045181 Added awarnesss flag for conversion_guests 2025-12-03 17:43:51 +01:00
Jonas Linter
f033abf76e Seems to mostly work now. Regular matching is still wrong 2025-12-03 17:05:58 +01:00
Jonas Linter
d03669873e Fine this needs more work 2025-12-03 16:12:07 +01:00
Jonas Linter
a6e4bcbe1b Significant matching fix. 2025-12-03 15:23:10 +01:00
Jonas Linter
12350578cc Finally fixed greenlet_spawn sqllchemy error. The horror 2025-12-03 14:13:20 +01:00
Jonas Linter
d2ed77e008 Readded fk constraint for conversion_guests 2025-12-03 12:27:17 +01:00
Jonas Linter
f2c40e1a23 Fixed removing hashed_customer 2025-12-03 12:12:37 +01:00
Jonas Linter
ad29a0a2f6 Not quite done but mostly starting to remove hashed_customer references 2025-12-03 12:00:02 +01:00
Jonas Linter
3175342cb2 Fixed greenlet error on rollback 2025-12-03 11:32:24 +01:00
Jonas Linter
1bdef3ee11 HashedCustomer is now no longer necessary 2025-12-03 11:10:27 +01:00
Jonas Linter
3193ceac63 Migration to single customer table works but conversion_service still needs updating 2025-12-03 10:51:18 +01:00
Jonas Linter
b572f660a7 Holy db migrations batman 2025-12-03 10:41:34 +01:00
Jonas Linter
e0c3b6e8af Reduced logging for conversion service 2025-12-02 17:03:07 +01:00
Jonas Linter
03aac27233 Replaced config auth with db auth 2025-12-02 16:43:56 +01:00
Jonas Linter
7ff3c44747 Reduced logging impact 2025-12-02 16:01:45 +01:00
Jonas Linter
87522711d4 Fixed small issue in webhook-processor not saving the results to the webhook_request table 2025-12-02 16:00:43 +01:00
Jonas Linter
f35f3f3dc9 Lots of refactoring and simplification in conversions_service 2025-12-02 15:45:40 +01:00
Jonas Linter
1f13991bfe Removed some unused fields 2025-12-02 15:38:39 +01:00
Jonas Linter
e2e2d12824 Update guest IDs in reservation tests for consistency 2025-12-02 15:27:14 +01:00
Jonas Linter
c0e601e308 Fixed up the damm tests 2025-12-02 15:24:30 +01:00
Jonas Linter
56d67984cf New pydantic model for ConversionGuest 2025-12-02 13:18:43 +01:00
Jonas Linter
ee80c57bcb Migration successfull. Does not cause any problems and new foreign keys work 2025-12-02 11:27:07 +01:00
e24866d8a0 Small db improvements. Still needs migration for alembic 2025-12-02 09:45:27 +00:00
43 changed files with 238868 additions and 394090 deletions

BIN
.coverage

Binary file not shown.

View File

@@ -0,0 +1,167 @@
"""Id columns changed to integer, foreign_keys added
Revision ID: b50c0f45030a
Revises: b2cfe2d3aabc
Create Date: 2025-12-02 11:06:25.850790
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'b50c0f45030a'
down_revision: Union[str, Sequence[str], None] = 'b2cfe2d3aabc'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Converts guest_id / pms_reservation_id columns from VARCHAR to Integer,
    relaxes several boolean columns to nullable, and rebuilds the
    surrounding foreign-key and unique constraints under the fk_*/uq_*
    naming convention.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop composite FK constraint first (references guest_id columns)
    op.drop_constraint(
        'conversions_hotel_id_guest_id_fkey', 'conversions', type_='foreignkey'
    )
    # Now convert the guest_id columns.
    # postgresql_using casts the existing VARCHAR data in place; this will
    # fail if any row holds a non-numeric value.
    op.alter_column('conversion_guests', 'guest_id',
                    existing_type=sa.VARCHAR(),
                    type_=sa.Integer(),
                    existing_nullable=False,
                    postgresql_using='guest_id::integer')
    op.alter_column('conversion_guests', 'is_regular',
                    existing_type=sa.BOOLEAN(),
                    nullable=True)
    # Rename the hashed_customer FK to the fk_* convention.
    # NOTE(review): the recreated constraint carries no ondelete, while the
    # downgrade restores it with ondelete='SET NULL' — confirm intentional.
    op.drop_constraint(op.f('conversion_guests_hashed_customer_id_fkey'), 'conversion_guests', type_='foreignkey')
    op.create_foreign_key(op.f('fk_conversion_guests_hashed_customer_id_hashed_customers'), 'conversion_guests', 'hashed_customers', ['hashed_customer_id'], ['id'])
    # Create FK with NOT VALID to skip checking existing data
    # (hotels table will be populated from config when app starts)
    op.create_foreign_key(
        op.f('fk_conversion_guests_hotel_id_hotels'),
        'conversion_guests',
        'hotels',
        ['hotel_id'],
        ['hotel_id'],
        ondelete='CASCADE',
        postgresql_not_valid=True
    )
    op.alter_column('conversions', 'hotel_id',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column('conversions', 'pms_reservation_id',
                    existing_type=sa.VARCHAR(),
                    type_=sa.Integer(),
                    nullable=False,
                    postgresql_using='pms_reservation_id::integer')
    op.alter_column('conversions', 'guest_id',
                    existing_type=sa.VARCHAR(),
                    type_=sa.Integer(),
                    existing_nullable=True,
                    postgresql_using='guest_id::integer')
    op.alter_column('conversions', 'directly_attributable',
                    existing_type=sa.BOOLEAN(),
                    nullable=True)
    op.alter_column('conversions', 'guest_matched',
                    existing_type=sa.BOOLEAN(),
                    nullable=True)
    # Re-create composite FK constraint after column type changes
    op.create_foreign_key(
        'conversions_hotel_id_guest_id_fkey',
        'conversions',
        'conversion_guests',
        ['hotel_id', 'guest_id'],
        ['hotel_id', 'guest_id'],
        ondelete='SET NULL'
    )
    # Enforce one conversion row per hotel / PMS-reservation pair
    op.create_unique_constraint('uq_conversion_hotel_reservation', 'conversions', ['hotel_id', 'pms_reservation_id'])
    # Create FK with NOT VALID for same reason as above
    op.create_foreign_key(
        op.f('fk_conversions_hotel_id_hotels'),
        'conversions',
        'hotels',
        ['hotel_id'],
        ['hotel_id'],
        ondelete='CASCADE',
        postgresql_not_valid=True
    )
    # Remaining statements only rename auto-generated unique constraints
    # to the uq_* convention (drop old name, recreate under new name).
    op.drop_constraint(op.f('customers_contact_id_key'), 'customers', type_='unique')
    op.create_unique_constraint(op.f('uq_customers_contact_id'), 'customers', ['contact_id'])
    op.drop_constraint(op.f('hashed_customers_contact_id_key'), 'hashed_customers', type_='unique')
    op.drop_constraint(op.f('hashed_customers_customer_id_key'), 'hashed_customers', type_='unique')
    op.create_unique_constraint(op.f('uq_hashed_customers_contact_id'), 'hashed_customers', ['contact_id'])
    op.create_unique_constraint(op.f('uq_hashed_customers_customer_id'), 'hashed_customers', ['customer_id'])
    op.drop_index(op.f('ix_reservations_hashed_customer_id'), table_name='reservations')
    op.drop_constraint(op.f('reservations_md5_unique_id_key'), 'reservations', type_='unique')
    op.drop_constraint(op.f('reservations_unique_id_key'), 'reservations', type_='unique')
    op.create_unique_constraint(op.f('uq_reservations_md5_unique_id'), 'reservations', ['md5_unique_id'])
    op.create_unique_constraint(op.f('uq_reservations_unique_id'), 'reservations', ['unique_id'])
    op.drop_index(op.f('idx_room_availability_inventory_date'), table_name='room_availability')
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Reverts the Integer column conversions, the nullability changes, and
    the constraint renames from upgrade(); no row data is rewritten.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Restore the original auto-generated index/constraint names
    op.create_index(op.f('idx_room_availability_inventory_date'), 'room_availability', ['inventory_id', 'date'], unique=False)
    op.drop_constraint(op.f('uq_reservations_unique_id'), 'reservations', type_='unique')
    op.drop_constraint(op.f('uq_reservations_md5_unique_id'), 'reservations', type_='unique')
    op.create_unique_constraint(op.f('reservations_unique_id_key'), 'reservations', ['unique_id'], postgresql_nulls_not_distinct=False)
    op.create_unique_constraint(op.f('reservations_md5_unique_id_key'), 'reservations', ['md5_unique_id'], postgresql_nulls_not_distinct=False)
    op.create_index(op.f('ix_reservations_hashed_customer_id'), 'reservations', ['hashed_customer_id'], unique=False)
    op.drop_constraint(op.f('uq_hashed_customers_customer_id'), 'hashed_customers', type_='unique')
    op.drop_constraint(op.f('uq_hashed_customers_contact_id'), 'hashed_customers', type_='unique')
    op.create_unique_constraint(op.f('hashed_customers_customer_id_key'), 'hashed_customers', ['customer_id'], postgresql_nulls_not_distinct=False)
    op.create_unique_constraint(op.f('hashed_customers_contact_id_key'), 'hashed_customers', ['contact_id'], postgresql_nulls_not_distinct=False)
    op.drop_constraint(op.f('uq_customers_contact_id'), 'customers', type_='unique')
    op.create_unique_constraint(op.f('customers_contact_id_key'), 'customers', ['contact_id'], postgresql_nulls_not_distinct=False)
    op.drop_constraint(op.f('fk_conversions_hotel_id_hotels'), 'conversions', type_='foreignkey')
    op.drop_constraint('uq_conversion_hotel_reservation', 'conversions', type_='unique')
    # Drop composite FK constraint before changing column types back
    op.drop_constraint(
        'conversions_hotel_id_guest_id_fkey', 'conversions', type_='foreignkey'
    )
    # Revert nullability; fails if NULLs were inserted since the upgrade
    op.alter_column('conversions', 'guest_matched',
                    existing_type=sa.BOOLEAN(),
                    nullable=False)
    op.alter_column('conversions', 'directly_attributable',
                    existing_type=sa.BOOLEAN(),
                    nullable=False)
    # Integer -> VARCHAR casts are implicit, so no postgresql_using needed
    op.alter_column('conversions', 'guest_id',
                    existing_type=sa.Integer(),
                    type_=sa.VARCHAR(),
                    existing_nullable=True)
    op.alter_column('conversions', 'pms_reservation_id',
                    existing_type=sa.Integer(),
                    type_=sa.VARCHAR(),
                    nullable=True)
    op.alter_column('conversions', 'hotel_id',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.drop_constraint(op.f('fk_conversion_guests_hotel_id_hotels'), 'conversion_guests', type_='foreignkey')
    op.drop_constraint(op.f('fk_conversion_guests_hashed_customer_id_hashed_customers'), 'conversion_guests', type_='foreignkey')
    op.create_foreign_key(op.f('conversion_guests_hashed_customer_id_fkey'), 'conversion_guests', 'hashed_customers', ['hashed_customer_id'], ['id'], ondelete='SET NULL')
    op.alter_column('conversion_guests', 'is_regular',
                    existing_type=sa.BOOLEAN(),
                    nullable=False)
    op.alter_column('conversion_guests', 'guest_id',
                    existing_type=sa.Integer(),
                    type_=sa.VARCHAR(),
                    existing_nullable=False)
    # Re-create composite FK constraint after reverting column types
    op.create_foreign_key(
        'conversions_hotel_id_guest_id_fkey',
        'conversions',
        'conversion_guests',
        ['hotel_id', 'guest_id'],
        ['hotel_id', 'guest_id'],
        ondelete='SET NULL'
    )
    # ### end Alembic commands ###

View File

@@ -0,0 +1,51 @@
"""remove_composite_fk_from_conversions
Revision ID: 694d52a883c3
Revises: b50c0f45030a
Create Date: 2025-12-03 09:50:18.506030
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '694d52a883c3'
down_revision: Union[str, Sequence[str], None] = 'b50c0f45030a'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Drops the composite conversions -> conversion_guests FK, renames
    reservations.hotel_code to hotel_id, and links reservations to hotels.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(op.f('conversions_hotel_id_guest_id_fkey'), 'conversions', type_='foreignkey')
    # Rename hotel_code to hotel_id (preserving data) and add FK to hotels.
    # Implemented as add-column / copy / drop-column rather than a native
    # rename, so the copy step is an explicit UPDATE.
    op.add_column('reservations', sa.Column('hotel_id', sa.String(), nullable=True))
    op.execute('UPDATE reservations SET hotel_id = hotel_code')
    op.drop_column('reservations', 'hotel_code')
    # Add FK constraint without immediate validation (NOT VALID)
    # This allows existing rows with non-existent hotel_ids to remain
    # Future inserts/updates will still be validated
    op.execute(
        'ALTER TABLE reservations ADD CONSTRAINT fk_reservations_hotel_id_hotels '
        'FOREIGN KEY (hotel_id) REFERENCES hotels (hotel_id) ON DELETE CASCADE NOT VALID'
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Reverts the hotel_id rename (copying values back into hotel_code) and
    restores the composite conversions -> conversion_guests FK.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop FK and rename hotel_id back to hotel_code (preserving data)
    op.drop_constraint(op.f('fk_reservations_hotel_id_hotels'), 'reservations', type_='foreignkey')
    op.add_column('reservations', sa.Column('hotel_code', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.execute('UPDATE reservations SET hotel_code = hotel_id')
    op.drop_column('reservations', 'hotel_id')
    op.create_foreign_key(op.f('conversions_hotel_id_guest_id_fkey'), 'conversions', 'conversion_guests', ['hotel_id', 'guest_id'], ['hotel_id', 'guest_id'], ondelete='SET NULL')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,104 @@
"""merge_hashed_customers_into_customers
Revision ID: 0fbeb40dbb2c
Revises: 694d52a883c3
Create Date: 2025-12-03 10:44:32.243220
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '0fbeb40dbb2c'
down_revision: Union[str, Sequence[str], None] = '694d52a883c3'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Folds the hashed_* columns of hashed_customers into customers, repoints
    reservations/conversions at customers, and detaches conversion_guests
    from hashed_customers. The hashed_customers table itself is kept for
    now (see note at the bottom).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Add hashed columns to customers table
    op.add_column('customers', sa.Column('hashed_email', sa.String(length=64), nullable=True))
    op.add_column('customers', sa.Column('hashed_phone', sa.String(length=64), nullable=True))
    op.add_column('customers', sa.Column('hashed_given_name', sa.String(length=64), nullable=True))
    op.add_column('customers', sa.Column('hashed_surname', sa.String(length=64), nullable=True))
    op.add_column('customers', sa.Column('hashed_city', sa.String(length=64), nullable=True))
    op.add_column('customers', sa.Column('hashed_postal_code', sa.String(length=64), nullable=True))
    op.add_column('customers', sa.Column('hashed_country_code', sa.String(length=64), nullable=True))
    op.add_column('customers', sa.Column('hashed_gender', sa.String(length=64), nullable=True))
    op.add_column('customers', sa.Column('hashed_birth_date', sa.String(length=64), nullable=True))
    op.add_column('customers', sa.Column('created_at', sa.DateTime(timezone=True), nullable=True))
    # Migrate data from hashed_customers to customers
    # (PostgreSQL UPDATE ... FROM join; an existing created_at wins via COALESCE)
    op.execute('''
        UPDATE customers c
        SET
            hashed_email = hc.hashed_email,
            hashed_phone = hc.hashed_phone,
            hashed_given_name = hc.hashed_given_name,
            hashed_surname = hc.hashed_surname,
            hashed_city = hc.hashed_city,
            hashed_postal_code = hc.hashed_postal_code,
            hashed_country_code = hc.hashed_country_code,
            hashed_gender = hc.hashed_gender,
            hashed_birth_date = hc.hashed_birth_date,
            created_at = COALESCE(c.created_at, hc.created_at)
        FROM hashed_customers hc
        WHERE c.id = hc.customer_id
    ''')
    # Update reservations to point to customers instead of hashed_customers
    # First, update reservations.customer_id from reservations.hashed_customer_id
    op.execute('''
        UPDATE reservations r
        SET customer_id = hc.customer_id
        FROM hashed_customers hc
        WHERE r.hashed_customer_id = hc.id
        AND r.customer_id IS NULL
    ''')
    # Update conversions to point to customers instead of hashed_customers
    op.execute('''
        UPDATE conversions c
        SET customer_id = hc.customer_id
        FROM hashed_customers hc
        WHERE c.hashed_customer_id = hc.id
        AND c.customer_id IS NULL
    ''')
    # Detach conversion_guests from hashed_customers. Unlike the two updates
    # above, the link is simply cleared rather than remapped — presumably
    # guests get re-matched to customers by the application; verify.
    op.execute('''
        UPDATE conversion_guests cg
        SET hashed_customer_id = NULL
        WHERE hashed_customer_id IS NOT NULL
    ''')
    # Now safe to drop the FK and column from reservations
    op.drop_constraint(op.f('reservations_hashed_customer_id_fkey'), 'reservations', type_='foreignkey')
    op.drop_column('reservations', 'hashed_customer_id')
    # Note: We're keeping the hashed_customers table for now since conversion_service.py still uses it
    # It can be dropped in a future migration after updating the application code
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Re-adds reservations.hashed_customer_id (with its FK) and drops the
    hashed_* columns from customers.

    NOTE(review): this downgrade is lossy — the hashed values merged into
    customers are dropped without being copied back, and the restored
    reservations.hashed_customer_id column comes back empty.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('reservations', sa.Column('hashed_customer_id', sa.INTEGER(), autoincrement=False, nullable=True))
    # hashed_customers still exists at this revision (dropped in 3147e421bc47)
    op.create_foreign_key(op.f('reservations_hashed_customer_id_fkey'), 'reservations', 'hashed_customers', ['hashed_customer_id'], ['id'], ondelete='CASCADE')
    op.drop_column('customers', 'created_at')
    op.drop_column('customers', 'hashed_birth_date')
    op.drop_column('customers', 'hashed_gender')
    op.drop_column('customers', 'hashed_country_code')
    op.drop_column('customers', 'hashed_postal_code')
    op.drop_column('customers', 'hashed_city')
    op.drop_column('customers', 'hashed_surname')
    op.drop_column('customers', 'hashed_given_name')
    op.drop_column('customers', 'hashed_phone')
    op.drop_column('customers', 'hashed_email')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,63 @@
"""removed hashed_customer completly
Revision ID: 3147e421bc47
Revises: 0fbeb40dbb2c
Create Date: 2025-12-03 11:42:05.722690
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '3147e421bc47'
down_revision: Union[str, Sequence[str], None] = '0fbeb40dbb2c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Removes every remaining reference to hashed_customers and drops the
    table itself (its data was merged into customers in revision
    0fbeb40dbb2c).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Index and FK must go before the column they cover can be dropped
    op.drop_index(op.f('ix_conversion_guests_hashed_customer_id'), table_name='conversion_guests')
    op.drop_constraint(op.f('fk_conversion_guests_hashed_customer_id_hashed_customers'), 'conversion_guests', type_='foreignkey')
    op.drop_column('conversion_guests', 'hashed_customer_id')
    op.drop_index(op.f('ix_conversions_hashed_customer_id'), table_name='conversions')
    op.drop_constraint(op.f('conversions_hashed_customer_id_fkey'), 'conversions', type_='foreignkey')
    op.drop_column('conversions', 'hashed_customer_id')
    # Safe to drop only after all referencing FKs above are gone
    op.drop_table('hashed_customers')
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Recreates the hashed_customers table plus the hashed_customer_id
    columns, foreign keys, and indexes on conversions and conversion_guests.

    Fix over the auto-generated version: hashed_customers must be created
    *before* the foreign keys that reference it — the original emitted
    create_foreign_key(..., 'hashed_customers', ...) first, which fails in
    PostgreSQL because the referenced table does not exist yet.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('hashed_customers',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('customer_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('contact_id', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('hashed_email', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('hashed_phone', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('hashed_given_name', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('hashed_surname', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('hashed_city', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('hashed_postal_code', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('hashed_country_code', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('hashed_gender', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('hashed_birth_date', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['customer_id'], ['customers.id'], name=op.f('hashed_customers_customer_id_fkey'), ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id', name=op.f('hashed_customers_pkey')),
        sa.UniqueConstraint('contact_id', name=op.f('uq_hashed_customers_contact_id'), postgresql_include=[], postgresql_nulls_not_distinct=False),
        sa.UniqueConstraint('customer_id', name=op.f('uq_hashed_customers_customer_id'), postgresql_include=[], postgresql_nulls_not_distinct=False)
    )
    # Now that the target table exists, re-add the referencing columns,
    # foreign keys and indexes (columns come back empty — the upgrade is
    # not reversible for this data).
    op.add_column('conversions', sa.Column('hashed_customer_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_foreign_key(op.f('conversions_hashed_customer_id_fkey'), 'conversions', 'hashed_customers', ['hashed_customer_id'], ['id'])
    op.create_index(op.f('ix_conversions_hashed_customer_id'), 'conversions', ['hashed_customer_id'], unique=False)
    op.add_column('conversion_guests', sa.Column('hashed_customer_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_foreign_key(op.f('fk_conversion_guests_hashed_customer_id_hashed_customers'), 'conversion_guests', 'hashed_customers', ['hashed_customer_id'], ['id'])
    op.create_index(op.f('ix_conversion_guests_hashed_customer_id'), 'conversion_guests', ['hashed_customer_id'], unique=False)
    # ### end Alembic commands ###

View File

@@ -0,0 +1,32 @@
"""add conversions→conversion_guests fk
Revision ID: 263bed87114f
Revises: 3147e421bc47
Create Date: 2025-12-03 12:25:12.820232
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '263bed87114f'
down_revision: Union[str, Sequence[str], None] = '3147e421bc47'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: re-attach the composite conversions -> conversion_guests FK."""
    # Both sides share the same (hotel_id, guest_id) column pair.
    op.create_foreign_key(
        'fk_conversions_guest',
        'conversions',
        'conversion_guests',
        ['hotel_id', 'guest_id'],
        ['hotel_id', 'guest_id'],
    )
def downgrade() -> None:
    """Downgrade schema: detach the composite conversions -> conversion_guests FK."""
    op.drop_constraint(
        'fk_conversions_guest',
        'conversions',
        type_='foreignkey',
    )

View File

@@ -0,0 +1,32 @@
"""boolean to signify awarness match in guests
Revision ID: 1daea5172a03
Revises: 263bed87114f
Create Date: 2025-12-03 17:44:29.657898
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '1daea5172a03'
down_revision: Union[str, Sequence[str], None] = '263bed87114f'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema: add the nullable is_awareness_guest flag to conversion_guests."""
    awareness_flag = sa.Column('is_awareness_guest', sa.Boolean(), nullable=True)
    op.add_column('conversion_guests', awareness_flag)
def downgrade() -> None:
    """Downgrade schema: remove the is_awareness_guest flag from conversion_guests."""
    op.drop_column(
        'conversion_guests',
        'is_awareness_guest',
    )

View File

@@ -0,0 +1,54 @@
"""pk_key_and_name_changes_for_room_availabilty
Revision ID: 872d95f54456
Revises: 1daea5172a03
Create Date: 2025-12-04 15:26:19.484062
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '872d95f54456'
down_revision: Union[str, Sequence[str], None] = '1daea5172a03'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    Replaces the generic count_type_* columns with semantically named
    ones and switches room_availability from a surrogate id primary key
    to a composite (inventory_id, date) primary key.

    NOTE(review): the old count_type_* values are dropped without being
    copied into bookable_type_2 / out_of_order_type_6 / not_bookable_type_9
    — existing availability data is lost. Presumably the application
    re-syncs it; confirm this is intended.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('room_availability', sa.Column('bookable_type_2', sa.Integer(), nullable=True))
    op.add_column('room_availability', sa.Column('out_of_order_type_6', sa.Integer(), nullable=True))
    op.add_column('room_availability', sa.Column('not_bookable_type_9', sa.Integer(), nullable=True))
    # The composite PK below supersedes these indexes and the old unique
    # key on the same column pair
    op.drop_index(op.f('ix_room_availability_date'), table_name='room_availability')
    op.drop_index(op.f('ix_room_availability_inventory_id'), table_name='room_availability')
    op.drop_constraint(op.f('uq_room_availability_unique_key'), 'room_availability', type_='unique')
    op.drop_column('room_availability', 'count_type_6')
    op.drop_column('room_availability', 'count_type_2')
    op.drop_column('room_availability', 'count_type_9')
    op.drop_column('room_availability', 'id')
    # Create composite primary key on inventory_id and date
    op.create_primary_key('pk_room_availability', 'room_availability', ['inventory_id', 'date'])
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Restores the surrogate id primary key and the count_type_* columns.

    Fixes over the auto-generated version:
    - adding ``id`` as a plain NOT NULL INTEGER fails on a populated table
      (no default to fill existing rows), and ``autoincrement=True`` on
      add_column does not create a sequence; an Identity column populates
      existing rows and auto-increments new ones
    - the primary key on ``id`` was never recreated, which would have left
      the table without any primary key after dropping the composite one
    """
    # Drop composite primary key before adding back the id column
    op.drop_constraint('pk_room_availability', 'room_availability', type_='primary')
    op.add_column('room_availability', sa.Column('id', sa.Integer(), sa.Identity(), nullable=False))
    # Restore the surrogate primary key (default PostgreSQL naming)
    op.create_primary_key('room_availability_pkey', 'room_availability', ['id'])
    op.add_column('room_availability', sa.Column('count_type_9', sa.INTEGER(), autoincrement=False, nullable=True))
    op.add_column('room_availability', sa.Column('count_type_2', sa.INTEGER(), autoincrement=False, nullable=True))
    op.add_column('room_availability', sa.Column('count_type_6', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_unique_constraint(op.f('uq_room_availability_unique_key'), 'room_availability', ['inventory_id', 'date'], postgresql_nulls_not_distinct=False)
    op.create_index(op.f('ix_room_availability_inventory_id'), 'room_availability', ['inventory_id'], unique=False)
    op.create_index(op.f('ix_room_availability_date'), 'room_availability', ['date'], unique=False)
    op.drop_column('room_availability', 'not_bookable_type_9')
    op.drop_column('room_availability', 'out_of_order_type_6')
    op.drop_column('room_availability', 'bookable_type_2')
    # NOTE: count_type_* values cannot be restored; the upgrade dropped
    # them without preserving the data.

File diff suppressed because one or more lines are too long

396
database_schema_analysis.md Normal file
View File

@@ -0,0 +1,396 @@
# Database Schema Analysis
## Overview
This document analyzes the database schema for normalization issues, redundancy, and potential improvements.
## Schema Summary
The database contains 13 tables organized around several core concepts:
- **Customer/Guest Management**: `customers`, `hashed_customers`, `conversion_guests`
- **Reservations**: `reservations`, `conversions`, `conversion_rooms`
- **Hotels**: `hotels`, `hotel_inventory`, `room_availability`
- **Webhooks**: `webhook_endpoints`, `webhook_requests`
- **Tracking**: `acked_requests`
---
## Major Issues Identified
### 1. **CRITICAL: Dual Customer Systems (Data Duplication)**
**Problem**: The schema maintains two parallel customer tracking systems:
- `customers` + `hashed_customers` (from Wix forms)
- `conversion_guests` (from PMS)
**Impact**:
- Same person can exist in both systems with no linkage
- `conversion_guests.hashed_customer_id` attempts to link but this is backward (many-to-one instead of one-to-one)
- Data inconsistency when same guest appears in both sources
**Details**:
```
customers (id=1, email="john@example.com")
└─ hashed_customers (id=1, customer_id=1, hashed_email="abc123...")
conversion_guests (hotel_id="HOTEL1", guest_id=42, guest_email="john@example.com")
└─ hashed_customer_id = NULL (or points to hashed_customers.id=1 after matching)
```
**Recommendation**:
- Create a unified `persons` table with a `source` field ("wix", "pms", "merged")
- Both `customers` and `conversion_guests` should reference this unified entity
- Implement proper guest matching/merging logic
---
### 2. **Data Redundancy: Hashed Values Stored Separately**
**Problem**: `hashed_customers` and `conversion_guests` store hashed values in separate columns alongside originals.
**Current Structure**:
```
customers:
- email_address (plaintext)
- phone (plaintext)
hashed_customers:
- customer_id (FK to customers)
- hashed_email
- hashed_phone
- hashed_given_name
...
```
**Issues**:
- Violates 3NF (derived data stored in separate table)
- Synchronization required between `customers` and `hashed_customers`
- If customer data changes, hashed version can become stale
- Extra JOIN required for every Meta Conversion API call
**Better Approach**:
Option A: Store hashed values directly in `customers` table as additional columns
Option B: Compute hashes on-the-fly (SHA256 is fast, ~1-2ms per hash)
**Recommendation**:
- **Short term**: Keep current structure but add triggers to auto-update hashed values
- **Long term**: Move hashed columns into `customers` table directly
---
### 3. **Advertising Account IDs Duplicated Across Tables**
**Problem**: `meta_account_id` and `google_account_id` appear in 3 places:
- `hotels` table (canonical source)
- `reservations` table (copied at creation time)
- Derived from `fbclid`/`gclid` tracking parameters
**Current Flow**:
```
hotels.meta_account_id = "123456"
reservation created with fbclid
reservations.meta_account_id = "123456" (copied from hotels)
```
**Issues**:
- Denormalization without clear benefit
- If hotel's account ID changes, old reservations have stale data
- Mixed source of truth (sometimes from hotels, sometimes from tracking params)
**Recommendation**:
- Remove `meta_account_id` and `google_account_id` from `reservations`
- Always derive from `hotels` table via JOIN
- If tracking-derived account differs from hotel's account, log a warning
---
### 4. **Hotel Information Duplicated in Reservations**
**Problem**: `reservations` table stores `hotel_code` and `hotel_name` but has no FK to `hotels` table.
**Issues**:
- Data can become inconsistent if hotel name changes
- No referential integrity
- Unclear if `hotel_code` matches `hotels.hotel_id`
**Recommendation**:
- Add `hotel_id` FK column to `reservations` pointing to `hotels.hotel_id`
- Remove `hotel_code` and `hotel_name` columns
- Derive hotel information via JOIN when needed
---
### 5. **Weak Foreign Key Consistency**
**Problem**: Mixed use of `ON DELETE` policies:
- Some FKs use `SET NULL` (appropriate for nullable relationships)
- Some use `CASCADE` (appropriate for child records)
- Some use `NO ACTION` (prevents deletion, may cause issues)
- `conversions` table has confusing composite FK setup with `hotel_id` and `guest_id`
**Examples**:
```sql
-- Good: Child data should be deleted with parent
-- Good: Child data should be deleted with parent
hotel_inventory.hotel_id -> hotels.hotel_id (ON DELETE CASCADE)
-- Questionable: Should webhook requests survive hotel deletion?
webhook_requests.hotel_id -> hotels.hotel_id (ON DELETE NO ACTION)
-- Inconsistent: Why SET NULL vs CASCADE?
reservations.customer_id -> customers.id (ON DELETE SET NULL)
reservations.hashed_customer_id -> hashed_customers.id (ON DELETE CASCADE)
```
**Recommendation**:
Review each FK and establish consistent policies:
- Core data (hotels, customers): SET NULL to preserve historical records
- Supporting data (hashed_customers, inventory): CASCADE
- Transactional data (webhooks, conversions): Decide on retention policy
---
### 6. **Confusing Composite Foreign Key in Conversions**
**Problem**: The `conversions` table has a composite FK that's incorrectly mapped:
```python
# In db.py lines 650-655
__table_args__ = (
ForeignKeyConstraint(
["hotel_id", "guest_id"],
["conversion_guests.hotel_id", "conversion_guests.guest_id"],
ondelete="SET NULL",
),
)
```
**But the database shows**:
```
Foreign Keys:
hotel_id -> conversion_guests.hotel_id (ON DELETE SET NULL)
guest_id -> conversion_guests.hotel_id (ON DELETE SET NULL) # ← WRONG!
guest_id -> conversion_guests.guest_id (ON DELETE SET NULL)
hotel_id -> conversion_guests.guest_id (ON DELETE SET NULL) # ← WRONG!
```
**Impact**:
- Database has 4 FKs instead of 1 composite FK
- Mapping is incorrect (guest_id → hotel_id doesn't make sense)
- Could cause constraint violations or allow orphaned records
**Recommendation**:
- Fix the composite FK definition in SQLAlchemy
- Run a migration to drop incorrect FKs and recreate properly
---
### 7. **Unclear Relationship Between Reservations and Conversions**
**Problem**: The relationship between `reservations` (from Wix forms) and `conversions` (from PMS) is complex:
```
conversions:
- reservation_id (FK to reservations) - matched by tracking IDs
- customer_id (FK to customers) - matched by guest details
- hashed_customer_id (FK to hashed_customers) - matched by hashed guest details
- guest_id (FK to conversion_guests) - the actual PMS guest
```
**Issues**:
- Three different FK fields to three different customer/guest tables
- Matching logic is unclear from schema alone
- `directly_attributable` and `guest_matched` flags indicate matching quality, but this should be more explicit
**Recommendation**:
- Add a `match_confidence` enum field: "exact_id", "high_confidence", "medium_confidence", "no_match"
- Add `match_method` field to explain how the link was made
- Consider a separate `reservation_conversion_links` table to make the many-to-many relationship explicit
---
### 8. **Room Type Information Scattered**
**Problem**: Room information appears in multiple places:
- `reservations.room_type_code`, `room_classification_code`, `room_type`
- `conversion_rooms.room_type`, `room_number`
- `hotel_inventory.inv_type_code`, `inv_code`, `room_name`
**Issues**:
- No clear master data for room types
- Room type codes not standardized across sources
- No FK between `reservations.room_type_code` and `hotel_inventory.inv_type_code`
**Recommendation**:
- Create a `room_types` reference table linked to hotels
- Add FKs from reservations and conversion_rooms to room_types
- Standardize room type codes across all sources
---
## Normalization Analysis
### 1st Normal Form (1NF): ✅ PASS
- All columns contain atomic values
- **Exception**: `reservations.children_ages` stores comma-separated values
- Should be: separate `reservation_children` table with age column
### 2nd Normal Form (2NF): ⚠️ MOSTLY PASS
- All non-key attributes depend on the full primary key
- **Issue**: Some denormalized data exists (hotel names, account IDs in reservations)
### 3rd Normal Form (3NF): ❌ FAIL
Multiple violations:
- `hashed_customers` stores derived data (hashes) that depend on `customers`
- `reservations.meta_account_id` depends on `hotels` via hotel_code
- `reservations.hotel_name` depends on `hotels` via hotel_code
---
## Data Integrity Issues
### Missing Foreign Keys
1. **reservations.hotel_code** → should FK to hotels.hotel_id
2. **reservations.room_type_code** → should FK to hotel_inventory
3. **acked_requests.unique_id** → should FK to reservations.unique_id (or be nullable)
### Missing Indexes
Consider adding for query performance:
1. `customers.email_address` - for lookups during conversion matching
2. `conversions.reservation_date` - for time-based queries
3. `conversion_rooms.total_revenue` - for revenue analytics
4. `reservations.start_date`, `end_date` - for date range queries
### Missing Constraints
1. **Check constraints** for date logic:
- `reservations.end_date > start_date`
- `conversion_rooms.departure_date > arrival_date`
2. **Check constraints** for counts:
- `num_adults >= 0`, `num_children >= 0`
3. **NOT NULL constraints** on critical fields:
- `customers.contact_id` should be NOT NULL (it's the natural key)
- `conversions.hotel_id` is NOT NULL ✓ (good)
---
## Recommendations Priority
### HIGH PRIORITY (Data Integrity)
1. Fix composite FK in `conversions` table (lines 650-655 in db.py)
2. Add `hotel_id` FK to `reservations` table
3. Add missing NOT NULL constraints on natural keys
4. Add check constraints for date ranges and counts
### MEDIUM PRIORITY (Normalization)
5. Unify customer/guest systems into a single `persons` entity
6. Remove duplicate account ID fields from `reservations`
7. Remove `hotel_name` from `reservations` (derive via JOIN)
8. Create `reservation_children` table for children_ages
### LOW PRIORITY (Performance & Cleanup)
9. Move hashed fields into `customers` table (remove `hashed_customers`)
10. Add indexes for common query patterns
11. Create `room_types` reference table
12. Add `match_confidence` and `match_method` to `conversions`
---
## Positive Aspects
✅ Good use of composite keys (`conversion_guests`, `hotel_inventory`)
✅ Unique constraints on natural keys (`contact_id`, `webhook_secret`)
✅ Proper use of indexes on frequently queried fields
✅ Cascade deletion for child records (inventory, rooms)
✅ Tracking metadata (created_at, updated_at, first_seen, last_seen)
✅ Webhook deduplication via `payload_hash`
✅ JSON storage for flexible data (`conversion_rooms.daily_sales`)
---
## Suggested Refactoring Path
### Phase 1: Fix Critical Issues (1-2 days)
- Fix composite FK in conversions
- Add hotel_id FK to reservations
- Add missing constraints
### Phase 2: Normalize Customer Data (3-5 days)
- Create unified persons/guests table
- Migrate existing data
- Update matching logic
### Phase 3: Clean Up Redundancy (2-3 days)
- Remove duplicate account IDs
- Merge hashed_customers into customers
- Create room_types reference
### Phase 4: Enhance Tracking (1-2 days)
- Add match_confidence fields
- Improve conversion attribution
- Add missing indexes
---
## Query Examples Affected by Current Issues
### Issue: Duplicate Customer Data
```sql
-- Current: Find all reservations for a guest (requires checking both systems)
SELECT r.* FROM reservations r
WHERE r.customer_id = ?
OR r.hashed_customer_id IN (
SELECT id FROM hashed_customers WHERE contact_id = ?
);
-- After fix: Simple unified query
SELECT r.* FROM reservations r
WHERE r.person_id = ?;
```
### Issue: Missing Hotel FK
```sql
-- Current: Get hotel info for reservation (unreliable)
SELECT r.*, r.hotel_name
FROM reservations r
WHERE r.id = ?;
-- After fix: Reliable JOIN
SELECT r.*, h.hotel_name, h.meta_account_id
FROM reservations r
JOIN hotels h ON r.hotel_id = h.hotel_id
WHERE r.id = ?;
```
### Issue: Hashed Data in Separate Table
```sql
-- Current: Get customer for Meta API (requires JOIN)
SELECT hc.hashed_email, hc.hashed_phone
FROM reservations r
JOIN hashed_customers hc ON r.hashed_customer_id = hc.id
WHERE r.id = ?;
-- After fix: Direct access
SELECT c.hashed_email, c.hashed_phone
FROM reservations r
JOIN customers c ON r.customer_id = c.id
WHERE r.id = ?;
```
---
## Conclusion
The schema is **functional but has significant normalization and consistency issues**. The main problems are:
1. **Dual customer tracking systems** that should be unified
2. **Redundant storage of derived data** (hashes, account IDs)
3. **Missing foreign key relationships** (hotels, room types)
4. **Inconsistent deletion policies** across foreign keys
5. **Broken composite foreign key** in conversions table
The database violates 3NF in several places and could benefit from a refactoring effort. However, the issues are primarily architectural rather than critical bugs, so the system can continue operating while improvements are made incrementally.
**Estimated effort to fix all issues**: 1-2 weeks of development + testing
**Risk level**: Medium (requires data migration and careful FK updates)
**Recommended approach**: Incremental fixes starting with high-priority items

View File

@@ -18,6 +18,7 @@ dependencies = [
"fast-langdetect>=1.0.0",
"fastapi>=0.117.1",
"generateds>=2.44.3",
"git-filter-repo>=2.47.0",
"httpx>=0.28.1",
"lxml>=6.0.1",
"pandas>=2.3.3",

View File

@@ -1,47 +0,0 @@
#!/bin/bash
# Reset database and initialize Alembic from scratch
# Interactive helper: it does NOT run the destructive SQL itself — it prints
# the statements for the operator to run manually, waits for confirmation,
# then applies all Alembic migrations against the freshly emptied schema.
echo "=== Database Reset Script ==="
echo "This will drop all tables and reinitialize with Alembic"
echo ""
# Require an explicit literal 'yes' so the reset cannot be triggered by accident.
read -p "Are you sure? (type 'yes' to continue): " confirm
if [ "$confirm" != "yes" ]; then
    echo "Aborted."
    exit 1
fi
echo ""
echo "Step 1: Dropping all tables in the database..."
echo "Connect to your database and run:"
echo ""
echo "  -- For PostgreSQL:"
echo "  DROP SCHEMA public CASCADE;"
echo "  CREATE SCHEMA public;"
echo "  GRANT ALL ON SCHEMA public TO <your_user>;"
echo "  GRANT ALL ON SCHEMA public TO public;"
echo ""
echo "  -- Or if using a custom schema (e.g., alpinebits):"
echo "  DROP SCHEMA alpinebits CASCADE;"
echo "  CREATE SCHEMA alpinebits;"
echo ""
echo "Press Enter after you've run the SQL commands..."
# Block until the operator confirms the manual SQL step is done.
read
echo ""
echo "Step 2: Running Alembic migrations..."
uv run alembic upgrade head
# Report success/failure of the migration run and show the resulting revision.
if [ $? -eq 0 ]; then
    echo ""
    echo "=== Success! ==="
    echo "Database has been reset and migrations applied."
    echo ""
    echo "Current migration status:"
    uv run alembic current
else
    echo ""
    echo "=== Error ==="
    echo "Migration failed. Check the error messages above."
    exit 1
fi

28
reset_db.sh Executable file
View File

@@ -0,0 +1,28 @@
#!/bin/bash
# Recreate the 'meta_insights' database inside the meta_timescaledb container,
# then optionally import a SQL dump given as the first argument.
# DROP DATABASE and CREATE DATABASE are issued in separate psql invocations
# because DROP DATABASE cannot run inside a transaction block.

# Run a single SQL statement against the maintenance database.
admin_sql() {
    docker exec -i meta_timescaledb psql -U meta_user -d postgres -c "$1"
}

if ! admin_sql "DROP DATABASE IF EXISTS meta_insights;"; then
    echo "Error: failed to drop database 'meta_insights'." >&2
    exit 1
fi

if ! admin_sql "CREATE DATABASE meta_insights;"; then
    echo "Error: failed to create database 'meta_insights'." >&2
    exit 1
fi

# Import the dump only when a path argument was supplied; the drop/create
# steps above have already succeeded at this point.
if [ -n "$1" ]; then
    DUMP_FILE="$1"
    if [ ! -r "$DUMP_FILE" ]; then
        echo "Error: dump file '$DUMP_FILE' does not exist or is not readable." >&2
        exit 2
    fi
    echo "Importing dump from $DUMP_FILE"
    if ! docker exec -i meta_timescaledb psql -U meta_user -d meta_insights < "$DUMP_FILE"; then
        echo "Error: failed to import dump '$DUMP_FILE' into 'meta_insights'." >&2
        exit 3
    fi
fi

View File

@@ -42,7 +42,7 @@ select res.id, res.created_at, con.created_at as "Con Created at", con.updated_a
left join alpinebits.conversions as con on con.reservation_id = res.id
left join alpinebits.conversion_guests as g on g.guest_id = con.guest_id
where hotel_code = '39054_001'
where hotel_id = '39054_001'
order by res.created_at desc limit 400
@@ -97,4 +97,44 @@ select sum(room.total_revenue::float), is_regular
group by is_regular
;
```
```
SELECT res.created_at AS "AnfrageDatum",
directly_attributable,
con.reservation_date,
res.start_date,
room.arrival_date,
res.end_date,
room.departure_date,
advertising_medium,
guest_first_name,
cus.given_name,
guest_last_name,
cus.surname,
total_revenue,
room.room_status,
room_number,
is_regular,
is_awareness_guest,
guest_matched,
con.hotel_id,
guest.guest_id
FROM alpinebits.conversions AS con
JOIN alpinebits.conversion_rooms AS room ON room.conversion_id = con.id
JOIN alpinebits.conversion_guests AS guest ON guest.guest_id = con.guest_id
LEFT JOIN alpinebits.reservations AS res ON res.id = con.reservation_id
LEFT JOIN alpinebits.customers AS cus ON cus.id = con.customer_id
WHERE reservation_date > '2025-01-01'
AND guest.guest_id IN (
SELECT DISTINCT g.guest_id
FROM alpinebits.conversions AS c
JOIN alpinebits.conversion_rooms AS r ON r.conversion_id = c.id
JOIN alpinebits.conversion_guests AS g ON g.guest_id = c.guest_id
WHERE c.reservation_date > '2025-01-01'
AND r.total_revenue > 0
)
ORDER BY guest_first_name, guest_last_name, room_status;
```

View File

@@ -768,9 +768,9 @@ def _process_single_reservation(
hotel_reservation_id=[hotel_res_id]
)
if reservation.hotel_code is None:
if reservation.hotel_id is None:
raise ValueError("Reservation hotel_code is None")
hotel_code = str(reservation.hotel_code)
hotel_code = str(reservation.hotel_id)
hotel_name = None if reservation.hotel_name is None else str(reservation.hotel_name)
basic_property_info = HotelReservation.ResGlobalInfo.BasicPropertyInfo(

View File

@@ -15,6 +15,7 @@ from enum import Enum
from typing import Any, Optional, override
from xsdata.formats.dataclass.serializers.config import SerializerConfig
from xsdata.exceptions import ParserError
from xsdata_pydantic.bindings import XmlParser, XmlSerializer
from alpine_bits_python.alpine_bits_helpers import (
@@ -33,6 +34,7 @@ from .generated.alpinebits import (
OtaReadRq,
WarningStatus,
)
from .hotel_service import HotelService
from .reservation_service import ReservationService
# Configure logging
@@ -413,20 +415,24 @@ def strip_control_chars(s):
return re.sub(r"[\x00-\x1F\x7F]", "", s)
def validate_hotel_authentication(
username: str, password: str, hotelid: str, config: dict
async def validate_hotel_authentication(
username: str,
password: str,
hotelid: str,
config: dict,
dbsession=None,
) -> bool:
"""Validate hotel authentication based on username, password, and hotel ID.
"""Validate hotel authentication against the database (fallback to config)."""
if dbsession is not None:
hotel_service = HotelService(dbsession)
hotel = await hotel_service.authenticate_hotel(username, password)
if hotel:
return hotel.hotel_id == hotelid
Example config
alpine_bits_auth:
- hotel_id: "123"
hotel_name: "Frangart Inn"
username: "alice"
password: !secret ALICE_PASSWORD
"""
# Fallback to config for legacy scenarios (e.g., during migration)
if not config or "alpine_bits_auth" not in config:
return False
auth_list = config["alpine_bits_auth"]
for auth in auth_list:
if (
@@ -471,8 +477,12 @@ class ReadAction(AlpineBitsAction):
return AlpineBitsResponse(
"Error: Something went wrong", HttpStatusCode.INTERNAL_SERVER_ERROR
)
read_request = XmlParser().from_string(request_xml, OtaReadRq)
try:
read_request = XmlParser().from_string(request_xml, OtaReadRq)
except ParserError:
return AlpineBitsResponse(
"Error: Invalid XML request", HttpStatusCode.BAD_REQUEST
)
hotel_read_request = read_request.read_requests.hotel_read_request
@@ -488,8 +498,12 @@ class ReadAction(AlpineBitsAction):
HttpStatusCode.UNAUTHORIZED,
)
if not validate_hotel_authentication(
client_info.username, client_info.password, hotelid, self.config
if not await validate_hotel_authentication(
client_info.username,
client_info.password,
hotelid,
self.config,
dbsession,
):
return AlpineBitsResponse(
f"Error: Unauthorized Read Request for this specific hotel {hotelname}. Check credentials",
@@ -522,7 +536,7 @@ class ReadAction(AlpineBitsAction):
await reservation_service.get_unacknowledged_reservations(
username=client_info.username,
client_id=client_info.client_id,
hotel_code=hotelid
hotel_code=hotelid,
)
)
else:
@@ -619,7 +633,9 @@ class NotifReportReadAction(AlpineBitsAction):
): # type: ignore
md5_unique_id = entry.unique_id.id
await reservation_service.record_acknowledgement(
client_id=client_info.client_id, unique_id=md5_unique_id, username=client_info.username
client_id=client_info.client_id,
unique_id=md5_unique_id,
username=client_info.username,
)
return AlpineBitsResponse(response_xml, HttpStatusCode.OK)
@@ -826,4 +842,4 @@ class AlpineBitsServer:
# Ensure FreeRoomsAction is registered with ServerCapabilities discovery
#from .free_rooms_action import FreeRoomsAction # noqa: E402,F401 disable for now
from .free_rooms_action import FreeRoomsAction

View File

@@ -2,7 +2,6 @@
import asyncio
import gzip
import hashlib
import json
import multiprocessing
import os
@@ -29,9 +28,10 @@ from fastapi.security import (
from pydantic import BaseModel
from slowapi.errors import RateLimitExceeded
from sqlalchemy import and_, select, update
from sqlalchemy.ext.asyncio import async_sessionmaker
from sqlalchemy.ext.asyncio import async_sessionmaker, AsyncSession
from sqlalchemy.orm import selectinload
from alpine_bits_python.hotel_service import HotelService
from alpine_bits_python.schemas import WebhookRequestData
from .alpinebits_server import (
@@ -46,14 +46,15 @@ from .const import HttpStatusCode, WebhookStatus
from .conversion_service import ConversionService
from .csv_import import CSVImporter
from .db import Customer as DBCustomer
from .db import Reservation as DBReservation
from .db import (
Hotel,
ResilientAsyncSession,
SessionMaker,
WebhookEndpoint,
WebhookRequest,
create_database_engine,
)
from .db import Reservation as DBReservation
from .db_setup import run_startup_tasks
from .email_monitoring import ReservationStatsCollector
from .email_service import create_email_service
@@ -137,7 +138,7 @@ async def push_listener(customer: DBCustomer, reservation: DBReservation, hotel)
server: AlpineBitsServer = app.state.alpine_bits_server
hotel_id = hotel["hotel_id"]
reservation_hotel_id = reservation.hotel_code
reservation_hotel_id = reservation.hotel_id
# Double-check hotel matching (should be guaranteed by dispatcher)
if hotel_id != reservation_hotel_id:
@@ -663,7 +664,8 @@ async def detect_language(
async def validate_basic_auth(
credentials: HTTPBasicCredentials = Depends(security_basic),
) -> str:
db_session=Depends(get_async_session),
) -> tuple[str, str]:
"""Validate basic authentication for AlpineBits protocol.
Returns username if valid, raises HTTPException if not.
@@ -675,26 +677,40 @@ async def validate_basic_auth(
detail="ERROR: Authentication required",
headers={"WWW-Authenticate": "Basic"},
)
valid = False
config = app.state.config
hotel_service = HotelService(db_session)
hotel = await hotel_service.authenticate_hotel(
credentials.username, credentials.password
)
for entry in config["alpine_bits_auth"]:
if hotel:
_LOGGER.info(
"AlpineBits authentication successful for user: %s (from database)",
credentials.username,
)
return credentials.username, credentials.password
# Fallback to config-defined credentials for legacy scenarios
config = app.state.config
valid = False
for entry in config.get("alpine_bits_auth", []):
if (
credentials.username == entry["username"]
and credentials.password == entry["password"]
credentials.username == entry.get("username")
and credentials.password == entry.get("password")
):
valid = True
_LOGGER.warning(
"AlpineBits authentication for user %s matched legacy config entry",
credentials.username,
)
break
if not valid:
raise HTTPException(
status_code=401,
detail="ERROR: Invalid credentials",
headers={"WWW-Authenticate": "Basic"},
)
_LOGGER.info(
"AlpineBits authentication successful for user: %s (from config)",
credentials.username,
)
return credentials.username, credentials.password
@@ -703,7 +719,7 @@ async def validate_basic_auth(
async def handle_webhook_unified(
request: Request,
webhook_secret: str,
db_session=Depends(get_async_session),
db_session: AsyncSession = Depends(get_async_session),
):
"""Unified webhook handler with deduplication and routing.
@@ -830,20 +846,34 @@ async def handle_webhook_unified(
if not webhook_endpoint:
raise HTTPException(status_code=404, detail="Webhook not found")
webhook_endpoint_id = webhook_endpoint.id
webhook_hotel_id = webhook_endpoint.hotel_id
# Verify hotel is active
if not webhook_endpoint.hotel.is_active:
raise HTTPException(status_code=404, detail="Hotel is not active")
# 3. Hash payload (canonical JSON for consistent hashing)
payload_json_str = json.dumps(payload, sort_keys=True)
payload_hash = hashlib.sha256(payload_json_str.encode("utf-8")).hexdigest()
payload_size = len(payload_json_str.encode("utf-8"))
# 3. Track payload metadata with canonical hashing handled by WebhookRequestData
payload_size = len(body)
# Check payload size limit (10MB)
if payload_size > 10 * 1024 * 1024:
_LOGGER.error("Payload too large: %d bytes", payload_size)
raise HTTPException(status_code=413, detail="Payload too large (max 10MB)")
webhook_request_data = WebhookRequestData(
payload_json=payload,
webhook_endpoint_id=webhook_endpoint_id,
hotel_id=webhook_hotel_id,
status=WebhookStatus.PROCESSING,
processing_started_at=timestamp,
created_at=timestamp,
source_ip=request.client.host if request.client else None,
user_agent=request.headers.get("user-agent"),
)
payload_hash = webhook_request_data.payload_hash
# 4. Check for duplicate with row-level locking
duplicate = await db_session.execute(
select(WebhookRequest)
@@ -890,31 +920,23 @@ async def handle_webhook_unified(
webhook_request.status = WebhookStatus.PROCESSING
webhook_request.processing_started_at = timestamp
else:
webhook_request_data = WebhookRequestData(
payload_hash=payload_hash,
webhook_endpoint_id=webhook_endpoint.id,
hotel_id=webhook_endpoint.hotel_id,
status=WebhookStatus.PROCESSING,
payload_json=payload,
processing_started_at=timestamp,
created_at=timestamp,
source_ip=request.client.host if request.client else None,
user_agent=request.headers.get("user-agent"),
)
# 5. Create new webhook_request
# 5. Create new webhook_request from validated data
webhook_request = WebhookRequest(**webhook_request_data.model_dump())
db_session.add(webhook_request)
await db_session.flush()
webhook_request_id = webhook_request.id
try:
# 6. Get processor for webhook_type
processor = webhook_registry.get_processor(webhook_endpoint.webhook_type)
if not processor:
raise ValueError(f"No processor for type: {webhook_endpoint.webhook_type}")
# Persist the webhook row before handing off to processors
await db_session.commit()
# 7. Process webhook with simplified interface
result = await processor.process(
webhook_request=webhook_request,
@@ -923,24 +945,50 @@ async def handle_webhook_unified(
event_dispatcher=request.app.state.event_dispatcher,
)
# 8. Update status
webhook_request.status = WebhookStatus.COMPLETED
webhook_request.processing_completed_at = datetime.now(UTC)
if not db_session.in_transaction():
await db_session.begin()
completion_values = {
"status": WebhookStatus.COMPLETED,
"processing_completed_at": datetime.now(UTC),
}
if isinstance(result, dict):
created_customer_id = result.get("customer_id")
created_reservation_id = result.get("reservation_id")
if created_customer_id:
completion_values["created_customer_id"] = created_customer_id
if created_reservation_id:
completion_values["created_reservation_id"] = created_reservation_id
await db_session.execute(
update(WebhookRequest)
.where(WebhookRequest.id == webhook_request_id)
.values(**completion_values)
)
await db_session.commit()
return {
**result,
"webhook_id": webhook_request.id,
"hotel_id": webhook_endpoint.hotel_id,
"webhook_id": webhook_request_id,
"hotel_id": webhook_hotel_id,
}
except Exception as e:
_LOGGER.exception("Error processing webhook: %s", e)
webhook_request.status = WebhookStatus.FAILED
webhook_request.last_error = str(e)[:2000]
webhook_request.processing_completed_at = datetime.now(UTC)
await db_session.rollback()
if not db_session.in_transaction():
await db_session.begin()
await db_session.execute(
update(WebhookRequest)
.where(WebhookRequest.id == webhook_request_id)
.values(
status=WebhookStatus.FAILED,
last_error=str(e)[:2000],
processing_completed_at=datetime.now(UTC),
)
)
await db_session.commit()
raise HTTPException(status_code=500, detail="Error processing webhook")
@@ -1134,6 +1182,7 @@ async def _process_conversion_xml_background(
filename: str,
session_maker: SessionMaker,
log_filename: Path,
hotel: Hotel,
):
"""Background task to process conversion XML.
@@ -1162,8 +1211,10 @@ async def _process_conversion_xml_background(
# Now process the conversion XML
_LOGGER.info("Starting database processing of %s", filename)
conversion_service = ConversionService(session_maker)
processing_stats = await conversion_service.process_conversion_xml(xml_content)
conversion_service = ConversionService(session_maker, hotel.hotel_id)
processing_stats = await conversion_service.process_conversion_xml(xml_content, run_full_guest_matching=True)
await conversion_service.classify_regular_guests(24)
_LOGGER.info(
"Conversion processing complete for %s: %s", filename, processing_stats
@@ -1250,6 +1301,10 @@ async def handle_xml_upload(
extension = Path(filename).suffix or ".xml"
log_filename = logs_dir / f"{base_filename}_{username}_{timestamp}{extension}"
hotel_service = HotelService(db_session)
hotel = await hotel_service.get_hotel_by_username(username)
_LOGGER.info(
"XML file queued for processing: %s by user %s (original: %s)",
log_filename,
@@ -1266,6 +1321,7 @@ async def handle_xml_upload(
filename,
session_maker,
log_filename,
hotel,
)
response_headers = {

File diff suppressed because it is too large Load Diff

View File

@@ -472,7 +472,7 @@ class CSVImporter:
num_adults=num_adults,
num_children=num_children,
children_ages=children_ages,
hotel_code=final_hotel_code,
hotel_id=final_hotel_code,
hotel_name=final_hotel_name,
offer=str(row.get("room_offer", "")).strip() or None,
user_comment=str(row.get("message", "")).strip() or None,

View File

@@ -6,7 +6,7 @@ from pydantic import ValidationError
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from .db import Customer, HashedCustomer
from .db import Customer
from .logging_config import get_logger
from .schemas import CustomerData
@@ -53,13 +53,13 @@ class CustomerService:
if "phone" in customer_data:
customer.phone = customer_data["phone"]
self.session.add(customer)
await self.session.flush() # Flush to get the customer.id
# Set creation timestamp
customer.created_at = datetime.now(UTC)
# Create hashed version
hashed_customer = customer.create_hashed_customer()
hashed_customer.created_at = datetime.now(UTC)
self.session.add(hashed_customer)
# Update hashed fields
customer.update_hashed_fields()
self.session.add(customer)
if auto_commit:
await self.session.commit()
@@ -130,29 +130,8 @@ class CustomerService:
if "phone" in update_data:
customer.phone = update_data["phone"]
# Update or create hashed version
result = await self.session.execute(
select(HashedCustomer).where(HashedCustomer.customer_id == customer.id)
)
hashed_customer = result.scalar_one_or_none()
if hashed_customer:
# Update existing hashed customer
new_hashed = customer.create_hashed_customer()
hashed_customer.hashed_email = new_hashed.hashed_email
hashed_customer.hashed_phone = new_hashed.hashed_phone
hashed_customer.hashed_given_name = new_hashed.hashed_given_name
hashed_customer.hashed_surname = new_hashed.hashed_surname
hashed_customer.hashed_city = new_hashed.hashed_city
hashed_customer.hashed_postal_code = new_hashed.hashed_postal_code
hashed_customer.hashed_country_code = new_hashed.hashed_country_code
hashed_customer.hashed_gender = new_hashed.hashed_gender
hashed_customer.hashed_birth_date = new_hashed.hashed_birth_date
else:
# Create new hashed customer if it doesn't exist
hashed_customer = customer.create_hashed_customer()
hashed_customer.created_at = datetime.now(UTC)
self.session.add(hashed_customer)
# Update hashed fields
customer.update_hashed_fields()
if auto_commit:
await self.session.commit()
@@ -200,26 +179,27 @@ class CustomerService:
# Create new customer (either no contact_id or customer doesn't exist)
return await self.create_customer(customer_data, auto_commit=auto_commit)
async def get_hashed_customer(self, customer_id: int) -> HashedCustomer | None:
async def get_customer(self, customer_id: int) -> Customer | None:
"""Get the hashed version of a customer.
Args:
customer_id: The customer ID
Returns:
HashedCustomer instance if found, None otherwise
Customer instance if found, None otherwise
"""
result = await self.session.execute(
select(HashedCustomer).where(HashedCustomer.customer_id == customer_id)
select(Customer).where(Customer.id == customer_id)
)
return result.scalar_one_or_none()
async def hash_existing_customers(self) -> int:
"""Hash all existing customers that don't have a hashed version yet.
"""Hash all existing customers that don't have hashed fields populated yet.
This is useful for backfilling hashed data for customers created
before the hashing system was implemented.
before the hashing system was implemented, or after migrating from
the separate hashed_customers table.
Also validates and sanitizes customer data (e.g., normalizes country
codes to uppercase). Customers with invalid data that cannot be fixed
@@ -229,62 +209,64 @@ class CustomerService:
Number of customers that were hashed
"""
# Get all customers
result = await self.session.execute(select(Customer))
# Get all customers without hashed data
result = await self.session.execute(
select(Customer).where(Customer.hashed_email.is_(None))
)
customers = result.scalars().all()
hashed_count = 0
skipped_count = 0
for customer in customers:
# Check if this customer already has a hashed version
existing_hashed = await self.get_hashed_customer(customer.id)
if not existing_hashed:
# Validate and sanitize customer data before hashing
customer_dict = {
"given_name": customer.given_name,
"surname": customer.surname,
"name_prefix": customer.name_prefix,
"email_address": customer.email_address,
"phone": customer.phone,
"email_newsletter": customer.email_newsletter,
"address_line": customer.address_line,
"city_name": customer.city_name,
"postal_code": customer.postal_code,
"country_code": customer.country_code,
"gender": customer.gender,
"birth_date": customer.birth_date,
"language": customer.language,
"address_catalog": customer.address_catalog,
"name_title": customer.name_title,
}
# Validate and sanitize customer data before hashing
customer_dict = {
"given_name": customer.given_name,
"surname": customer.surname,
"name_prefix": customer.name_prefix,
"email_address": customer.email_address,
"phone": customer.phone,
"email_newsletter": customer.email_newsletter,
"address_line": customer.address_line,
"city_name": customer.city_name,
"postal_code": customer.postal_code,
"country_code": customer.country_code,
"gender": customer.gender,
"birth_date": customer.birth_date,
"language": customer.language,
"address_catalog": customer.address_catalog,
"name_title": customer.name_title,
}
try:
# Validate through Pydantic (normalizes country code)
validated = CustomerData(**customer_dict)
try:
# Validate through Pydantic (normalizes country code)
validated = CustomerData(**customer_dict)
# Update customer with sanitized data
# Exclude 'phone_numbers' as Customer model uses 'phone' field
for key, value in validated.model_dump(
exclude_none=True, exclude={"phone_numbers"}
).items():
if hasattr(customer, key):
setattr(customer, key, value)
# Update customer with sanitized data
# Exclude 'phone_numbers' as Customer model uses 'phone' field
for key, value in validated.model_dump(
exclude_none=True, exclude={"phone_numbers"}
).items():
if hasattr(customer, key):
setattr(customer, key, value)
# Create hashed version with sanitized data
hashed_customer = customer.create_hashed_customer()
hashed_customer.created_at = datetime.now(UTC)
self.session.add(hashed_customer)
hashed_count += 1
# Update hashed fields with sanitized data
customer.update_hashed_fields()
except ValidationError as e:
# Skip customers with invalid data and log
skipped_count += 1
_LOGGER.warning(
"Skipping customer ID %s due to validation error: %s",
customer.id,
e,
)
# Set created_at if not already set
if not customer.created_at:
customer.created_at = datetime.now(UTC)
hashed_count += 1
except ValidationError as e:
# Skip customers with invalid data and log
skipped_count += 1
_LOGGER.warning(
"Skipping customer ID %s due to validation error: %s",
customer.id,
e,
)
if hashed_count > 0:
await self.session.commit()

View File

@@ -4,8 +4,6 @@ import os
from collections.abc import AsyncGenerator, Callable
from typing import TypeVar
from .const import WebhookStatus
from sqlalchemy import (
JSON,
Boolean,
@@ -17,6 +15,8 @@ from sqlalchemy import (
ForeignKeyConstraint,
Index,
Integer,
MetaData,
PrimaryKeyConstraint,
String,
UniqueConstraint,
func,
@@ -28,8 +28,9 @@ from sqlalchemy.ext.asyncio import (
async_sessionmaker,
create_async_engine,
)
from sqlalchemy.orm import backref, declarative_base, relationship
from sqlalchemy.orm import backref, declarative_base, foreign, relationship
from .const import WebhookStatus
from .logging_config import get_logger
_LOGGER = get_logger(__name__)
@@ -58,7 +59,16 @@ class Base:
# __table_args__ = {"schema": _SCHEMA}
Base = declarative_base(cls=Base)
# Define naming convention for constraints
metadata = MetaData(naming_convention={
"ix": "ix_%(column_0_label)s",
"uq": "uq_%(table_name)s_%(column_0_name)s",
"ck": "ck_%(table_name)s_%(constraint_name)s",
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
"pk": "pk_%(table_name)s"
})
Base = declarative_base(cls=Base, metadata=metadata)
# Type variable for async functions
T = TypeVar("T")
@@ -302,6 +312,20 @@ class Customer(Base):
language = Column(String)
address_catalog = Column(Boolean) # Added for XML
name_title = Column(String) # Added for XML
# Hashed fields for Meta Conversion API (SHA256)
hashed_email = Column(String(64))
hashed_phone = Column(String(64))
hashed_given_name = Column(String(64))
hashed_surname = Column(String(64))
hashed_city = Column(String(64))
hashed_postal_code = Column(String(64))
hashed_country_code = Column(String(64))
hashed_gender = Column(String(64))
hashed_birth_date = Column(String(64))
created_at = Column(DateTime(timezone=True))
reservations = relationship("Reservation", back_populates="customer")
def __repr__(self):
@@ -326,48 +350,19 @@ class Customer(Base):
# SHA256 hash
return hashlib.sha256(normalized.encode("utf-8")).hexdigest()
def create_hashed_customer(self):
"""Create a HashedCustomer instance from this Customer."""
return HashedCustomer(
customer_id=self.id,
contact_id=self.contact_id,
hashed_email=self._normalize_and_hash(self.email_address),
hashed_phone=self._normalize_and_hash(self.phone),
hashed_given_name=self._normalize_and_hash(self.given_name),
hashed_surname=self._normalize_and_hash(self.surname),
hashed_city=self._normalize_and_hash(self.city_name),
hashed_postal_code=self._normalize_and_hash(self.postal_code),
hashed_country_code=self._normalize_and_hash(self.country_code),
hashed_gender=self._normalize_and_hash(self.gender),
hashed_birth_date=self._normalize_and_hash(self.birth_date),
)
def update_hashed_fields(self):
"""Update the hashed fields based on current plaintext values."""
self.hashed_email = self._normalize_and_hash(self.email_address)
self.hashed_phone = self._normalize_and_hash(self.phone)
self.hashed_given_name = self._normalize_and_hash(self.given_name)
self.hashed_surname = self._normalize_and_hash(self.surname)
self.hashed_city = self._normalize_and_hash(self.city_name)
self.hashed_postal_code = self._normalize_and_hash(self.postal_code)
self.hashed_country_code = self._normalize_and_hash(self.country_code)
self.hashed_gender = self._normalize_and_hash(self.gender)
self.hashed_birth_date = self._normalize_and_hash(self.birth_date)
class HashedCustomer(Base):
"""Hashed customer data for Meta Conversion API.
Stores SHA256 hashed versions of customer PII according to Meta's requirements.
This allows sending conversion events without exposing raw customer data.
"""
__tablename__ = "hashed_customers"
id = Column(Integer, primary_key=True)
customer_id = Column(
Integer, ForeignKey("customers.id", ondelete="SET NULL"), unique=True, nullable=True
)
contact_id = Column(String, unique=True) # Keep unhashed for reference
hashed_email = Column(String(64)) # SHA256 produces 64 hex chars
hashed_phone = Column(String(64))
hashed_given_name = Column(String(64))
hashed_surname = Column(String(64))
hashed_city = Column(String(64))
hashed_postal_code = Column(String(64))
hashed_country_code = Column(String(64))
hashed_gender = Column(String(64))
hashed_birth_date = Column(String(64))
created_at = Column(DateTime(timezone=True))
customer = relationship("Customer", backref=backref("hashed_version", uselist=False, lazy="joined"))
class ConversionGuest(Base):
@@ -383,8 +378,14 @@ class ConversionGuest(Base):
__tablename__ = "conversion_guests"
# Natural keys from PMS - composite primary key
hotel_id = Column(String, nullable=False, primary_key=True, index=True)
guest_id = Column(String, nullable=False, primary_key=True, index=True)
hotel_id = Column(
String(50),
ForeignKey("hotels.hotel_id", ondelete="CASCADE"),
nullable=False,
primary_key=True,
index=True,
)
guest_id = Column(Integer, nullable=False, primary_key=True, index=True)
# Unhashed guest information (for reference/transition period)
guest_first_name = Column(String)
@@ -400,19 +401,30 @@ class ConversionGuest(Base):
hashed_country_code = Column(String(64))
hashed_birth_date = Column(String(64))
# Matched customer reference (nullable, filled after matching)
hashed_customer_id = Column(Integer, ForeignKey("hashed_customers.id"), nullable=True, index=True)
# Guest classification
is_regular = Column(Boolean, default=False) # True if guest has many prior stays before appearing in our reservations
is_regular = Column(
Boolean, default=False
) # True if guest has many prior stays before appearing in our reservations
# Guest classification
is_awareness_guest = Column(
Boolean, default=False
) # True if guests first stay was from our campaigns
# Metadata
first_seen = Column(DateTime(timezone=True))
last_seen = Column(DateTime(timezone=True))
# Relationships
conversions = relationship("Conversion", back_populates="guest")
hashed_customer = relationship("HashedCustomer", backref="conversion_guests")
conversions = relationship(
"Conversion",
back_populates="guest",
foreign_keys="[Conversion.hotel_id, Conversion.guest_id]",
primaryjoin="and_(ConversionGuest.hotel_id == foreign(Conversion.hotel_id), "
"ConversionGuest.guest_id == foreign(Conversion.guest_id))",
)
@staticmethod
def _normalize_and_hash(value):
@@ -428,7 +440,7 @@ class ConversionGuest(Base):
def create_from_conversion_data(
cls,
hotel_id: str,
guest_id: str | None,
guest_id: int | None,
guest_first_name: str | None,
guest_last_name: str | None,
guest_email: str | None,
@@ -483,7 +495,9 @@ class ConversionGuest(Base):
self.hashed_country_code = self._normalize_and_hash(guest_country_code)
if guest_birth_date:
self.guest_birth_date = guest_birth_date
self.hashed_birth_date = self._normalize_and_hash(guest_birth_date.isoformat())
self.hashed_birth_date = self._normalize_and_hash(
guest_birth_date.isoformat()
)
self.last_seen = now
@@ -491,7 +505,6 @@ class Reservation(Base):
__tablename__ = "reservations"
id = Column(Integer, primary_key=True)
customer_id = Column(Integer, ForeignKey("customers.id", ondelete="SET NULL"))
hashed_customer_id = Column(Integer, ForeignKey("hashed_customers.id", ondelete="CASCADE"))
unique_id = Column(String, unique=True)
md5_unique_id = Column(String(32), unique=True) # max length 32 guaranteed
start_date = Column(Date)
@@ -513,15 +526,14 @@ class Reservation(Base):
# Advertising account IDs (stored conditionally based on fbclid/gclid presence)
meta_account_id = Column(String)
google_account_id = Column(String)
# Add hotel_code and hotel_name for XML
hotel_code = Column(String)
# Add hotel_id and hotel_name for XML
hotel_id = Column(String, ForeignKey("hotels.hotel_id", ondelete="CASCADE"))
hotel_name = Column(String)
# RoomTypes fields (optional)
room_type_code = Column(String)
room_classification_code = Column(String)
room_type = Column(String)
customer = relationship("Customer", back_populates="reservations")
hashed_customer = relationship("HashedCustomer", backref="reservations")
# Table for tracking acknowledged requests by client
@@ -541,7 +553,7 @@ class AckedRequest(Base):
) # Username of the client making the request
unique_id = Column(
String, index=True
) # Should match Reservation.form_id or another unique field
) # Matches the md5_unique_id in Reservation
timestamp = Column(DateTime(timezone=True))
@@ -573,23 +585,27 @@ class Conversion(Base):
Integer, ForeignKey("reservations.id"), nullable=True, index=True
)
customer_id = Column(Integer, ForeignKey("customers.id"), nullable=True, index=True)
hashed_customer_id = Column(
Integer, ForeignKey("hashed_customers.id"), nullable=True, index=True
)
# Reservation metadata from XML
hotel_id = Column(String, index=True) # hotelID attribute
guest_id = Column(String, nullable=True, index=True) # PMS guest ID, FK to conversion_guests
pms_reservation_id = Column(String, index=True) # id attribute from reservation
hotel_id = Column(
String(50),
ForeignKey("hotels.hotel_id", ondelete="CASCADE"),
nullable=False,
index=True,
) # hotelID attribute
pms_reservation_id = Column(
Integer, nullable=False, index=True
) # id attribute from reservation
guest_id = Column(
Integer, nullable=True, index=True
) # PMS guest ID, FK to conversion_guests
reservation_number = Column(String) # number attribute
reservation_date = Column(Date) # date attribute (when reservation was made)
creation_time = Column(DateTime(timezone=True)) # creationTime attribute
reservation_type = Column(String) # type attribute (e.g., "reservation")
booking_channel = Column(String) # bookingChannel attribute
# Advertising/tracking data - used for matching to existing reservations
advertising_medium = Column(
String, index=True
@@ -602,27 +618,37 @@ class Conversion(Base):
) # advertisingCampagne (contains fbclid/gclid)
# Attribution flags - track how this conversion was matched
directly_attributable = Column(Boolean, default=False) # Matched by ID (high confidence)
directly_attributable = Column(
Boolean, default=False
) # Matched by ID (high confidence)
guest_matched = Column(Boolean, default=False) # Matched by guest details only
# Metadata
created_at = Column(DateTime(timezone=True)) # When this record was imported
updated_at = Column(DateTime(timezone=True)) # When this record was last updated
# Composite foreign key constraint for ConversionGuest (hotel_id, guest_id)
# Table constraints
__table_args__ = (
UniqueConstraint(
"hotel_id", "pms_reservation_id", name="uq_conversion_hotel_reservation"
),
ForeignKeyConstraint(
["hotel_id", "guest_id"],
["conversion_guests.hotel_id", "conversion_guests.guest_id"],
ondelete="SET NULL",
name="fk_conversions_guest",
),
)
# Relationships
reservation = relationship("Reservation", backref="conversions")
customer = relationship("Customer", backref="conversions")
hashed_customer = relationship("HashedCustomer", backref="conversions")
guest = relationship("ConversionGuest", back_populates="conversions")
guest = relationship(
"ConversionGuest",
back_populates="conversions",
foreign_keys="[Conversion.hotel_id, Conversion.guest_id]",
primaryjoin="and_(Conversion.hotel_id == ConversionGuest.hotel_id, "
"Conversion.guest_id == ConversionGuest.guest_id)",
)
conversion_rooms = relationship(
"ConversionRoom", back_populates="conversion", cascade="all, delete-orphan"
)
@@ -688,7 +714,10 @@ class HotelInventory(Base):
id = Column(Integer, primary_key=True)
hotel_id = Column(
String(50), ForeignKey("hotels.hotel_id", ondelete="CASCADE"), nullable=False, index=True
String(50),
ForeignKey("hotels.hotel_id", ondelete="CASCADE"),
nullable=False,
index=True,
)
inv_type_code = Column(String(8), nullable=False, index=True)
inv_code = Column(String(16), nullable=True, index=True)
@@ -722,14 +751,15 @@ class RoomAvailability(Base):
__tablename__ = "room_availability"
id = Column(Integer, primary_key=True)
inventory_id = Column(
Integer, ForeignKey("hotel_inventory.id", ondelete="CASCADE"), nullable=False, index=True
Integer,
ForeignKey("hotel_inventory.id", ondelete="CASCADE"),
nullable=False,
)
date = Column(Date, nullable=False, index=True)
count_type_2 = Column(Integer, nullable=True)
count_type_6 = Column(Integer, nullable=True)
count_type_9 = Column(Integer, nullable=True)
date = Column(Date, nullable=False)
bookable_type_2 = Column(Integer, nullable=True)
out_of_order_type_6 = Column(Integer, nullable=True)
not_bookable_type_9 = Column(Integer, nullable=True)
is_closing_season = Column(Boolean, nullable=False, default=False)
last_updated = Column(DateTime(timezone=True), nullable=False)
update_type = Column(String(20), nullable=False)
@@ -737,7 +767,7 @@ class RoomAvailability(Base):
inventory_item = relationship("HotelInventory", back_populates="availability")
__table_args__ = (
UniqueConstraint("inventory_id", "date", name="uq_room_availability_unique_key"),
PrimaryKeyConstraint("inventory_id", "date", name="pk_room_availability"),
)
@@ -785,7 +815,9 @@ class WebhookEndpoint(Base):
id = Column(Integer, primary_key=True)
# Hotel association
hotel_id = Column(String(50), ForeignKey("hotels.hotel_id"), nullable=False, index=True)
hotel_id = Column(
String(50), ForeignKey("hotels.hotel_id"), nullable=False, index=True
)
# Webhook configuration
webhook_secret = Column(String(64), unique=True, nullable=False, index=True)
@@ -801,7 +833,7 @@ class WebhookEndpoint(Base):
webhook_requests = relationship("WebhookRequest", back_populates="webhook_endpoint")
__table_args__ = (
Index('idx_webhook_endpoint_hotel_type', 'hotel_id', 'webhook_type'),
Index("idx_webhook_endpoint_hotel_type", "hotel_id", "webhook_type"),
)
@@ -814,11 +846,17 @@ class WebhookRequest(Base):
# Request identification
payload_hash = Column(String(64), unique=True, nullable=False, index=True) # SHA256
webhook_endpoint_id = Column(Integer, ForeignKey("webhook_endpoints.id"), nullable=True, index=True)
hotel_id = Column(String(50), ForeignKey("hotels.hotel_id"), nullable=True, index=True)
webhook_endpoint_id = Column(
Integer, ForeignKey("webhook_endpoints.id"), nullable=True, index=True
)
hotel_id = Column(
String(50), ForeignKey("hotels.hotel_id"), nullable=True, index=True
)
# Processing tracking
status = Column(String(20), nullable=False, default=WebhookStatus.PENDING.value, index=True)
status = Column(
String(20), nullable=False, default=WebhookStatus.PENDING.value, index=True
)
# Status values: 'pending', 'processing', 'completed', 'failed' set by Enum WebhookStatus
processing_started_at = Column(DateTime(timezone=True), nullable=True)
@@ -839,16 +877,20 @@ class WebhookRequest(Base):
# Result tracking
created_customer_id = Column(Integer, ForeignKey("customers.id"), nullable=True)
created_reservation_id = Column(Integer, ForeignKey("reservations.id"), nullable=True)
created_reservation_id = Column(
Integer, ForeignKey("reservations.id"), nullable=True
)
# Relationships
webhook_endpoint = relationship("WebhookEndpoint", back_populates="webhook_requests")
webhook_endpoint = relationship(
"WebhookEndpoint", back_populates="webhook_requests"
)
hotel = relationship("Hotel")
customer = relationship("Customer")
reservation = relationship("Reservation")
__table_args__ = (
Index('idx_webhook_status_created', 'status', 'created_at'),
Index('idx_webhook_hotel_created', 'hotel_id', 'created_at'),
Index('idx_webhook_purge_candidate', 'status', 'purged_at', 'created_at'),
Index("idx_webhook_status_created", "status", "created_at"),
Index("idx_webhook_hotel_created", "hotel_id", "created_at"),
Index("idx_webhook_purge_candidate", "status", "purged_at", "created_at"),
)

View File

@@ -115,7 +115,7 @@ async def backfill_advertising_account_ids(
sql = text(
"UPDATE reservations "
"SET meta_account_id = :meta_account "
"WHERE hotel_code = :hotel_id "
"WHERE hotel_id = :hotel_id "
"AND fbclid IS NOT NULL "
"AND fbclid != '' "
"AND (meta_account_id IS NULL OR meta_account_id = '')"
@@ -141,7 +141,7 @@ async def backfill_advertising_account_ids(
sql = text(
"UPDATE reservations "
"SET google_account_id = :google_account "
"WHERE hotel_code = :hotel_id "
"WHERE hotel_id = :hotel_id "
"AND gclid IS NOT NULL "
"AND gclid != '' "
"AND (google_account_id IS NULL OR google_account_id = '')"
@@ -215,7 +215,7 @@ async def backfill_acked_requests_username(
UPDATE acked_requests
SET username = :username
WHERE unique_id IN (
SELECT md5_unique_id FROM reservations WHERE hotel_code = :hotel_id
SELECT md5_unique_id FROM reservations WHERE hotel_id = :hotel_id
)
AND username IS NULL
"""

View File

@@ -523,10 +523,10 @@ class ReservationStatsCollector:
async with self.async_sessionmaker() as session:
# Query reservations created in the reporting period
result = await session.execute(
select(Reservation.hotel_code, func.count(Reservation.id))
select(Reservation.hotel_id, func.count(Reservation.id))
.where(Reservation.created_at >= period_start)
.where(Reservation.created_at < period_end)
.group_by(Reservation.hotel_code)
.group_by(Reservation.hotel_id)
)
hotel_counts = dict(result.all())

View File

@@ -48,9 +48,9 @@ CLOSING_SEASON_TYPE = "__CLOSE" # <= 8 chars per spec
SOURCE_FREEROOMS = "FreeRooms"
COUNT_TYPE_MAP = {
InvCountCountType.VALUE_2: "count_type_2",
InvCountCountType.VALUE_6: "count_type_6",
InvCountCountType.VALUE_9: "count_type_9",
InvCountCountType.VALUE_2: "bookable_type_2",
InvCountCountType.VALUE_6: "out_of_order_type_6",
InvCountCountType.VALUE_9: "not_bookable_type_9",
}
@@ -125,11 +125,12 @@ class FreeRoomsAction(AlpineBitsAction):
code="401",
)
if not validate_hotel_authentication(
if not await validate_hotel_authentication(
client_info.username,
client_info.password,
hotel_code,
self.config,
dbsession,
):
raise FreeRoomsProcessingError(
f"Unauthorized FreeRooms notification for hotel {hotel_code}",
@@ -201,6 +202,167 @@ class FreeRoomsAction(AlpineBitsAction):
result = await session.execute(stmt)
return result.scalar_one_or_none()
def _validate_request(
self,
request: OtaHotelInvCountNotifRq,
update_type: str,
enforce_closing_order: bool,
) -> None:
"""
Validate the entire request before making any database changes.
This performs all validation checks upfront to fail fast and avoid
expensive rollbacks of database operations.
Args:
request: The parsed OTA request
update_type: "CompleteSet" or "Delta"
enforce_closing_order: Whether to enforce closing seasons must come first
Raises:
FreeRoomsProcessingError: If any validation fails
"""
inventories = request.inventories.inventory if request.inventories else []
if not inventories:
raise FreeRoomsProcessingError(
"Request must include at least one Inventory block",
HttpStatusCode.BAD_REQUEST,
)
# Special case: CompleteSet with single empty Inventory element to reset all availability
if (
update_type == "CompleteSet"
and len(inventories) == 1
and inventories[0].status_application_control is None
and inventories[0].inv_counts is None
):
# This is valid - it's a reset request
return
encountered_standard = False
has_categories = False # Tracks if we've seen category reports (no InvCode)
has_rooms = False # Tracks if we've seen individual room reports (with InvCode)
closing_season_ranges: list[tuple[date, date]] = []
# Track date ranges per room/category to detect overlaps
inventory_ranges: dict[tuple[str, str | None], list[tuple[date, date]]] = {}
for inventory in inventories:
sac = inventory.status_application_control
if sac is None:
raise FreeRoomsProcessingError(
"StatusApplicationControl element is required for each Inventory",
HttpStatusCode.BAD_REQUEST,
)
is_closing = self._is_closing_season(sac)
# Validate closing seasons
if is_closing:
# Closing seasons are only allowed in CompleteSet - fail fast
if update_type != "CompleteSet":
raise FreeRoomsProcessingError(
"Closing seasons are only allowed on CompleteSet updates",
HttpStatusCode.BAD_REQUEST,
)
if inventory.inv_counts is not None:
raise FreeRoomsProcessingError(
"Closing seasons cannot contain InvCounts data",
HttpStatusCode.BAD_REQUEST,
)
if enforce_closing_order and encountered_standard:
raise FreeRoomsProcessingError(
"Closing seasons must appear before other inventory entries",
HttpStatusCode.BAD_REQUEST,
)
if sac.inv_type_code or sac.inv_code:
raise FreeRoomsProcessingError(
"Closing season entries cannot specify InvTypeCode or InvCode",
HttpStatusCode.BAD_REQUEST,
)
# Validate and store date range
start_date, end_date = self._parse_date_range(sac.start, sac.end)
closing_season_ranges.append((start_date, end_date))
continue
# Mark that we've seen a non-closing inventory entry
encountered_standard = True
# Validate standard inventory entries
inv_type_code = (sac.inv_type_code or "").strip()
if not inv_type_code:
error_message = "InvTypeCode is required unless AllInvCode=\"true\" or similar truthy values"
_LOGGER.info(error_message)
raise FreeRoomsProcessingError(
error_message,
HttpStatusCode.BAD_REQUEST,
)
# Validate date range
start_date, end_date = self._parse_date_range(sac.start, sac.end)
# Check if this inventory entry has any counts (available rooms)
# Entries without counts represent unavailable rooms
has_availability = inventory.inv_counts is not None and inventory.inv_counts.inv_count
# Check for overlap with closing seasons
# Only entries with availability (counts) cannot overlap with closing seasons
# Entries without counts (unavailable rooms) can overlap with closing seasons
if has_availability:
for closing_start, closing_end in closing_season_ranges:
if self._date_ranges_overlap(start_date, end_date, closing_start, closing_end):
error_message = f"Inventory entry ({start_date} to {end_date}) overlaps with closing season ({closing_start} to {closing_end})"
_LOGGER.info(error_message)
raise FreeRoomsProcessingError(
error_message,
HttpStatusCode.BAD_REQUEST,
)
# Check for overlap with other inventory entries for the same room/category
inv_code = sac.inv_code.strip() if sac.inv_code else None
inventory_key = (inv_type_code, inv_code)
if inventory_key in inventory_ranges:
for existing_start, existing_end in inventory_ranges[inventory_key]:
if self._date_ranges_overlap(start_date, end_date, existing_start, existing_end):
room_desc = f"room '{inv_code}'" if inv_code else f"category '{inv_type_code}'"
raise FreeRoomsProcessingError(
f"Overlapping date ranges for {room_desc}: ({start_date} to {end_date}) and ({existing_start} to {existing_end})",
HttpStatusCode.BAD_REQUEST,
)
else:
inventory_ranges[inventory_key] = []
inventory_ranges[inventory_key].append((start_date, end_date))
# Validate that we don't mix categories and individual rooms
has_inv_code = sac.inv_code is not None and sac.inv_code.strip() != ""
if has_inv_code:
if has_categories:
raise FreeRoomsProcessingError(
"Mixing room categories and individual rooms in one request is not allowed",
HttpStatusCode.BAD_REQUEST,
)
has_rooms = True
else:
if has_rooms:
raise FreeRoomsProcessingError(
"Mixing room categories and individual rooms in one request is not allowed",
HttpStatusCode.BAD_REQUEST,
)
has_categories = True
# Validate counts
self._extract_counts(inventory.inv_counts)
# Check for overlapping closing seasons
for i, (start1, end1) in enumerate(closing_season_ranges):
for start2, end2 in closing_season_ranges[i + 1:]:
if self._date_ranges_overlap(start1, end1, start2, end2):
raise FreeRoomsProcessingError(
f"Closing seasons overlap: ({start1} to {end1}) and ({start2} to {end2})",
HttpStatusCode.BAD_REQUEST,
)
async def _process_complete_set(
self,
session: AsyncSession,
@@ -209,7 +371,16 @@ class FreeRoomsAction(AlpineBitsAction):
update_type: str,
inventory_cache: dict[tuple[str, str | None], HotelInventory],
) -> None:
# Validate first before making any database changes
self._validate_request(request, update_type, enforce_closing_order=True)
# Only delete if validation passes
# Delete availability data for all FreeRooms-sourced inventory
await self._delete_existing_availability(session, hotel.hotel_id)
# Delete stale inventory items that are sourced from FreeRooms
await self._delete_existing_inventory(session, hotel.hotel_id)
# Process the validated request
await self._process_inventories(
session, hotel, request, update_type, inventory_cache, enforce_closing_order=True
)
@@ -222,6 +393,10 @@ class FreeRoomsAction(AlpineBitsAction):
update_type: str,
inventory_cache: dict[tuple[str, str | None], HotelInventory],
) -> None:
# Validate first before making any database changes
self._validate_request(request, update_type, enforce_closing_order=False)
# Process the validated request
await self._process_inventories(
session, hotel, request, update_type, inventory_cache, enforce_closing_order=False
)
@@ -231,11 +406,29 @@ class FreeRoomsAction(AlpineBitsAction):
session: AsyncSession,
hotel_id: str,
) -> None:
"""Delete all room availability data for a hotel (regardless of source)."""
subquery = select(HotelInventory.id).where(HotelInventory.hotel_id == hotel_id)
await session.execute(
delete(RoomAvailability).where(RoomAvailability.inventory_id.in_(subquery))
)
async def _delete_existing_inventory(
self,
session: AsyncSession,
hotel_id: str,
) -> None:
"""Delete inventory items sourced from FreeRooms.
This preserves inventory items from other sources (e.g., HotelInventory endpoint)
as they are not managed by FreeRooms and should persist across CompleteSet updates.
"""
await session.execute(
delete(HotelInventory).where(
HotelInventory.hotel_id == hotel_id,
HotelInventory.source == SOURCE_FREEROOMS,
)
)
async def _process_inventories(
self,
session: AsyncSession,
@@ -245,42 +438,23 @@ class FreeRoomsAction(AlpineBitsAction):
inventory_cache: dict[tuple[str, str | None], HotelInventory],
enforce_closing_order: bool,
) -> None:
inventories = request.inventories.inventory if request.inventories else []
if not inventories:
raise FreeRoomsProcessingError(
"Request must include at least one Inventory block",
HttpStatusCode.BAD_REQUEST,
)
"""
Process validated inventory data and store in database.
Note: Validation should be done before calling this method via _validate_request().
This method focuses on data transformation and persistence.
"""
inventories = request.inventories.inventory if request.inventories else []
rows_to_upsert: list[dict[str, Any]] = []
now = datetime.now(UTC)
encountered_standard = False
for inventory in inventories:
sac = inventory.status_application_control
if sac is None:
raise FreeRoomsProcessingError(
"StatusApplicationControl element is required for each Inventory",
HttpStatusCode.BAD_REQUEST,
)
continue # Should not happen after validation
is_closing = self._is_closing_season(sac)
if is_closing:
if inventory.inv_counts is not None:
raise FreeRoomsProcessingError(
"Closing seasons cannot contain InvCounts data",
HttpStatusCode.BAD_REQUEST,
)
if update_type != "CompleteSet":
raise FreeRoomsProcessingError(
"Closing seasons are only allowed on CompleteSet updates",
HttpStatusCode.BAD_REQUEST,
)
if enforce_closing_order and encountered_standard:
raise FreeRoomsProcessingError(
"Closing seasons must appear before other inventory entries",
HttpStatusCode.BAD_REQUEST,
)
rows_to_upsert.extend(
await self._process_closing_season(
session, hotel, sac, update_type, now, inventory_cache
@@ -288,7 +462,6 @@ class FreeRoomsAction(AlpineBitsAction):
)
continue
encountered_standard = True
rows_to_upsert.extend(
await self._process_inventory_item(
session,
@@ -312,12 +485,7 @@ class FreeRoomsAction(AlpineBitsAction):
timestamp: datetime,
inventory_cache: dict[tuple[str, str | None], HotelInventory],
) -> list[dict[str, Any]]:
if sac.inv_type_code or sac.inv_code:
raise FreeRoomsProcessingError(
"Closing season entries cannot specify InvTypeCode or InvCode",
HttpStatusCode.BAD_REQUEST,
)
"""Process a closing season entry. Assumes validation already done."""
start_date, end_date = self._parse_date_range(sac.start, sac.end)
inventory_item = await self._ensure_inventory_item(
session,
@@ -330,9 +498,9 @@ class FreeRoomsAction(AlpineBitsAction):
base_payload = {
"inventory_id": inventory_item.id,
"count_type_2": None,
"count_type_6": None,
"count_type_9": None,
"bookable_type_2": None,
"out_of_order_type_6": None,
"not_bookable_type_9": None,
"is_closing_season": True,
"last_updated": timestamp,
"update_type": update_type,
@@ -357,21 +525,16 @@ class FreeRoomsAction(AlpineBitsAction):
timestamp: datetime,
inventory_cache: dict[tuple[str, str | None], HotelInventory],
) -> list[dict[str, Any]]:
"""Process a standard inventory item. Assumes validation already done."""
inv_type_code = (sac.inv_type_code or "").strip()
if not inv_type_code:
raise FreeRoomsProcessingError(
"InvTypeCode is required unless AllInvCode=\"true\"",
HttpStatusCode.BAD_REQUEST,
)
inv_code = sac.inv_code.strip() if sac.inv_code else None
start_date, end_date = self._parse_date_range(sac.start, sac.end)
counts = self._extract_counts(inv_counts)
base_counts = {
"count_type_2": counts.get("count_type_2"),
"count_type_6": counts.get("count_type_6"),
"count_type_9": counts.get("count_type_9"),
"bookable_type_2": counts.get("bookable_type_2"),
"out_of_order_type_6": counts.get("out_of_order_type_6"),
"not_bookable_type_9": counts.get("not_bookable_type_9"),
}
inventory_item = await self._ensure_inventory_item(
@@ -415,6 +578,15 @@ class FreeRoomsAction(AlpineBitsAction):
)
return start_date, end_date
def _date_ranges_overlap(
self, start1: date, end1: date, start2: date, end2: date
) -> bool:
"""Check if two date ranges overlap (inclusive).
Returns True if the ranges have any dates in common.
"""
return start1 <= end2 and start2 <= end1
def _iter_days(self, start_date: date, end_date: date):
current = start_date
while current <= end_date:
@@ -425,7 +597,12 @@ class FreeRoomsAction(AlpineBitsAction):
self,
sac: OtaHotelInvCountNotifRq.Inventories.Inventory.StatusApplicationControl,
) -> bool:
return (sac.all_inv_code or "").strip().lower() == "true"
"""Check if AllInvCode is a truthy boolean value.
Accepts: "true", "True", "TRUE", "1", "yes", "Yes", "YES", etc.
"""
value = (sac.all_inv_code or "").strip().lower()
return value in ("true", "1", "yes")
def _extract_counts(
self,
@@ -544,9 +721,9 @@ class FreeRoomsAction(AlpineBitsAction):
def _build_upsert_set(self, stmt):
return {
"count_type_2": stmt.excluded.count_type_2,
"count_type_6": stmt.excluded.count_type_6,
"count_type_9": stmt.excluded.count_type_9,
"bookable_type_2": stmt.excluded.bookable_type_2,
"out_of_order_type_6": stmt.excluded.out_of_order_type_6,
"not_bookable_type_9": stmt.excluded.not_bookable_type_9,
"is_closing_season": stmt.excluded.is_closing_season,
"last_updated": stmt.excluded.last_updated,
"update_type": stmt.excluded.update_type,
@@ -564,9 +741,9 @@ class FreeRoomsAction(AlpineBitsAction):
existing = result.scalar_one_or_none()
if existing:
existing.count_type_2 = row["count_type_2"]
existing.count_type_6 = row["count_type_6"]
existing.count_type_9 = row["count_type_9"]
existing.bookable_type_2 = row["bookable_type_2"]
existing.out_of_order_type_6 = row["out_of_order_type_6"]
existing.not_bookable_type_9 = row["not_bookable_type_9"]
existing.is_closing_season = row["is_closing_season"]
existing.last_updated = row["last_updated"]
existing.update_type = row["update_type"]

View File

@@ -244,3 +244,26 @@ class HotelService:
)
)
return result.scalar_one_or_none()
async def authenticate_hotel(self, username: str, password: str) -> Hotel | None:
    """Authenticate a hotel using username and password.

    Args:
        username: AlpineBits username
        password: Plain text password submitted via HTTP basic auth

    Returns:
        Hotel instance if the credentials are valid and the hotel is active,
        otherwise None.
    """
    # Look up the hotel first so the lookup happens even for empty
    # passwords, mirroring the original call order.
    hotel = await self.get_hotel_by_username(username)
    # verify_password is only invoked when both a hotel record and a
    # non-empty password are present.
    if hotel and password and verify_password(password, hotel.password_hash):
        return hotel
    return None

View File

@@ -7,7 +7,7 @@ from typing import Optional
from sqlalchemy import and_, select
from sqlalchemy.ext.asyncio import AsyncSession
from .db import AckedRequest, Customer, HashedCustomer, Reservation
from .db import AckedRequest, Customer, Reservation
from .schemas import ReservationData
@@ -64,17 +64,6 @@ class ReservationService:
reservation_data, customer_id
)
# Automatically populate hashed_customer_id from the customer
# Since hashed_customer is always created when a customer is created,
# we can get it by querying for the hashed_customer with matching customer_id
hashed_customer_result = await self.session.execute(
select(HashedCustomer).where(
HashedCustomer.customer_id == customer_id
)
)
hashed_customer = hashed_customer_result.scalar_one_or_none()
if hashed_customer:
reservation.hashed_customer_id = hashed_customer.id
self.session.add(reservation)
@@ -181,7 +170,7 @@ class ReservationService:
if end_date:
filters.append(Reservation.created_at <= end_date)
if hotel_code:
filters.append(Reservation.hotel_code == hotel_code)
filters.append(Reservation.hotel_id == hotel_code)
if filters:
query = query.where(and_(*filters))

View File

@@ -5,6 +5,10 @@ This script should be run before starting the application to ensure
the database schema is up to date. It can be run standalone or called
from run_api.py before starting uvicorn.
If the database is completely empty (no tables), it will create all tables
from the current SQLAlchemy models and stamp the database with the latest
migration version, avoiding the need to run historical migrations.
Usage:
uv run python -m alpine_bits_python.run_migrations
or
@@ -12,24 +16,160 @@ Usage:
run_migrations()
"""
import asyncio
import subprocess
import sys
from pathlib import Path
from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine
from .config_loader import load_config
from .db import Base, get_database_schema, get_database_url
from .logging_config import get_logger
_LOGGER = get_logger(__name__)
async def is_database_empty() -> bool:
    """Check if the database has any tables in our schema.

    Returns:
        True if the database has no tables in the target schema, False otherwise.
        Also returns False when the check itself fails (configuration missing
        or the database is unreachable), so callers fall back to running
        normal migrations.
    """
    try:
        app_config = load_config()
        db_url = get_database_url(app_config)
        schema = get_database_schema(app_config)

        if not db_url:
            _LOGGER.error("Database URL not configured")
            return False

        # Create async engine for checking
        engine = create_async_engine(db_url, echo=False)
        try:
            async with engine.connect() as conn:
                # Set search path if schema is configured
                if schema:
                    await conn.execute(text(f"SET search_path TO {schema}"))

                # Check for any tables in the schema
                result = await conn.execute(
                    text(
                        """
                        SELECT COUNT(*)
                        FROM information_schema.tables
                        WHERE table_schema = :schema
                        """
                    ),
                    {"schema": schema or "public"},
                )
                count = result.scalar()
        finally:
            # Bug fix: previously the engine was only disposed on the happy
            # path, leaking the connection pool when the query raised.
            await engine.dispose()

        return count == 0
    except Exception as e:
        _LOGGER.warning(f"Could not check if database is empty: {e}")
        return False
async def create_all_tables() -> None:
    """Create all tables from SQLAlchemy models in an empty database.

    Exits the process with status 1 if the database URL is missing or
    table creation fails.
    """
    try:
        app_config = load_config()
        db_url = get_database_url(app_config)
        schema = get_database_schema(app_config)

        if not db_url:
            _LOGGER.error("Database URL not configured")
            sys.exit(1)

        _LOGGER.info("Creating all database tables from SQLAlchemy models...")

        # Create async engine
        engine = create_async_engine(db_url, echo=False)
        try:
            async with engine.begin() as conn:
                # Set search path if schema is configured
                if schema:
                    # Only create schema if it's not 'public' (public always exists)
                    # NOTE(review): schema is interpolated into SQL; it comes
                    # from our own config, but confirm it is never user-supplied.
                    if schema != "public":
                        await conn.execute(text(f"CREATE SCHEMA IF NOT EXISTS {schema}"))
                    await conn.execute(text(f"SET search_path TO {schema}"))

                # Create all tables
                await conn.run_sync(Base.metadata.create_all)
        finally:
            # Bug fix: dispose the engine even when table creation raises,
            # instead of leaking the pool on the error path.
            await engine.dispose()

        _LOGGER.info("All tables created successfully")
    except Exception as e:
        _LOGGER.error(f"Failed to create tables: {e}")
        sys.exit(1)
def stamp_database() -> None:
    """Stamp the database with the latest migration version.

    This tells Alembic that the database is at the 'head' revision without
    actually running the migration scripts. Exits the process with status 1
    when the alembic command fails or is not installed.
    """
    _LOGGER.info("Stamping database with latest migration version...")

    # alembic.ini lives at the project root, three levels above this module.
    root_dir = Path(__file__).parent.parent.parent

    try:
        completed = subprocess.run(
            ["alembic", "stamp", "head"],
            cwd=root_dir,
            capture_output=True,
            text=True,
            check=True,
        )
    except FileNotFoundError:
        _LOGGER.error(
            "Alembic not found. Please ensure it's installed: uv pip install alembic"
        )
        sys.exit(1)
    except subprocess.CalledProcessError as e:
        _LOGGER.error("Failed to stamp database:")
        _LOGGER.error("Exit code: %d", e.returncode)
        _LOGGER.error("stdout: %s", e.stdout)
        _LOGGER.error("stderr: %s", e.stderr)
        sys.exit(1)
    else:
        _LOGGER.info("Database stamped successfully")
        _LOGGER.debug("Stamp output: %s", completed.stdout)
def run_migrations() -> None:
"""Run Alembic migrations to upgrade database to latest schema.
This function runs 'alembic upgrade head' to apply all pending migrations.
It will exit the process if migrations fail.
If the database is empty, creates all tables from SQLAlchemy models
and stamps the database with the latest migration version.
Otherwise, runs 'alembic upgrade head' to apply all pending migrations.
Raises:
SystemExit: If migrations fail
"""
_LOGGER.info("Checking database state...")
# Check if database is empty
is_empty = asyncio.run(is_database_empty())
if is_empty:
_LOGGER.info(
"Database is empty - creating all tables from models and stamping version"
)
asyncio.run(create_all_tables())
stamp_database()
_LOGGER.info("Database initialization completed successfully")
return
# Database has tables, run normal migrations
_LOGGER.info("Running database migrations...")
# Get the project root directory (where alembic.ini is located)

View File

@@ -11,7 +11,7 @@ from XML generation (xsdata) follows clean architecture principles.
import hashlib
import json
from datetime import date, datetime
from datetime import UTC, date, datetime
from enum import Enum
from typing import Any
@@ -20,6 +20,35 @@ from pydantic import BaseModel, EmailStr, Field, field_validator, model_validato
from .const import WebhookStatus
# Generalized integer validator for reuse across models
def convert_to_int(field_name: str, v: Any) -> int:
    """Convert a value to integer, handling string inputs.

    Args:
        field_name: Name of the field being validated (for error messages)
        v: Value to convert (can be int, str, or None)

    Returns:
        Integer value

    Raises:
        ValueError: If value is None or cannot be converted to int
    """
    if v is None:
        raise ValueError(f"{field_name} cannot be None")
    if isinstance(v, int):
        return v
    if not isinstance(v, str):
        raise ValueError(f"{field_name} must be int or str, got: {type(v)}")
    # String input: delegate to int() and re-raise with field context.
    try:
        return int(v)
    except ValueError as e:
        raise ValueError(f"{field_name} must be a valid integer, got: {v}") from e
# Country name to ISO 3166-1 alpha-2 code mapping
COUNTRY_NAME_TO_CODE = {
# English names
@@ -102,7 +131,7 @@ class ReservationData(BaseModel):
num_adults: int = Field(..., ge=1)
num_children: int = Field(0, ge=0, le=10)
children_ages: list[int] = Field(default_factory=list)
hotel_code: str = Field(..., min_length=1, max_length=50)
hotel_id: str = Field(..., min_length=1, max_length=50)
hotel_name: str | None = Field(None, max_length=200)
offer: str | None = Field(None, max_length=500)
user_comment: str | None = Field(None, max_length=2000)
@@ -195,6 +224,7 @@ class CustomerData(BaseModel):
Returns:
2-letter ISO country code (uppercase) or None if input is None/empty
"""
if not v:
return None
@@ -367,8 +397,7 @@ class WebhookRequestData(BaseModel):
# Required fields
payload_json: dict[str, Any] | None = Field(
...,
description="Webhook payload (required for creation, nullable after purge)"
..., description="Webhook payload (required for creation, nullable after purge)"
)
# Auto-calculated from payload_json
@@ -376,7 +405,7 @@ class WebhookRequestData(BaseModel):
None,
min_length=64,
max_length=64,
description="SHA256 hash of canonical JSON payload (auto-calculated)"
description="SHA256 hash of canonical JSON payload (auto-calculated)",
)
# Optional foreign keys
@@ -455,35 +484,132 @@ class WebhookRequestData(BaseModel):
# Example usage in a service layer
class ReservationService:
"""Example service showing how to use Pydantic models with SQLAlchemy."""
class ConversionGuestData(BaseModel):
"""Validated conversion guest data from PMS XML.
def __init__(self, db_session):
self.db_session = db_session
Handles validation and hashing for guest records extracted from
hotel PMS conversion XML files.
"""
async def create_reservation(
self, reservation_data: ReservationData, customer_data: CustomerData
):
"""Create a reservation with validated data.
hotel_id: str = Field(..., min_length=1, max_length=50)
guest_id: int = Field(..., gt=0)
guest_first_name: str | None = Field(None, max_length=100)
guest_last_name: str | None = Field(None, max_length=100)
guest_email: str | None = Field(None, max_length=200)
guest_country_code: str | None = Field(None, max_length=10)
guest_birth_date: date | None = None
The data has already been validated by Pydantic before reaching here.
# Auto-calculated hashed fields
hashed_first_name: str | None = Field(None, max_length=64)
hashed_last_name: str | None = Field(None, max_length=64)
hashed_email: str | None = Field(None, max_length=64)
hashed_country_code: str | None = Field(None, max_length=64)
hashed_birth_date: str | None = Field(None, max_length=64)
# Timestamps
first_seen: datetime = Field(default_factory=lambda: datetime.now(UTC))
last_seen: datetime = Field(default_factory=lambda: datetime.now(UTC))
@staticmethod
def _normalize_and_hash(value: str | None) -> str | None:
"""Normalize and hash a value for privacy-preserving matching.
Uses the same logic as ConversionGuest._normalize_and_hash.
"""
from alpine_bits_python.db import Customer, Reservation
if value is None or value == "":
return None
# Normalize: lowercase, strip whitespace
normalized = value.lower().strip()
if not normalized:
return None
# Hash with SHA256
return hashlib.sha256(normalized.encode("utf-8")).hexdigest()
# Convert validated Pydantic model to SQLAlchemy model
db_customer = Customer(**customer_data.model_dump(exclude_none=True))
self.db_session.add(db_customer)
await self.db_session.flush() # Get the customer ID
@model_validator(mode="after")
def calculate_hashes(self) -> "ConversionGuestData":
"""Auto-calculate hashed fields from plain text fields."""
if self.hashed_first_name is None:
self.hashed_first_name = self._normalize_and_hash(self.guest_first_name)
if self.hashed_last_name is None:
self.hashed_last_name = self._normalize_and_hash(self.guest_last_name)
if self.hashed_email is None:
self.hashed_email = self._normalize_and_hash(self.guest_email)
if self.hashed_country_code is None:
self.hashed_country_code = self._normalize_and_hash(self.guest_country_code)
if self.hashed_birth_date is None and self.guest_birth_date is not None:
self.hashed_birth_date = self._normalize_and_hash(
self.guest_birth_date.isoformat()
)
return self
# Create reservation linked to customer
db_reservation = Reservation(
customer_id=db_customer.id,
**reservation_data.model_dump(
exclude={"children_ages"}
), # Handle separately
children_ages=",".join(map(str, reservation_data.children_ages)),
)
self.db_session.add(db_reservation)
await self.db_session.commit()
@field_validator("guest_id", mode="before")
@classmethod
def convert_guest_id_to_int(cls, v: Any) -> int:
"""Convert guest_id to integer (handles string input from XML)."""
return convert_to_int("guest_id", v)
return db_reservation, db_customer
model_config = {"from_attributes": True}
class ConversionData(BaseModel):
    """Validated conversion data from PMS XML.

    Handles validation for conversion records extracted from
    hotel PMS conversion XML files. This model ensures proper type conversion
    and validation before creating a Conversion database entry.
    """

    # Foreign key references (nullable - matched after creation)
    reservation_id: int | None = Field(None, gt=0)
    customer_id: int | None = Field(None, gt=0)

    # Required reservation metadata from PMS
    hotel_id: str = Field(..., min_length=1, max_length=50)
    pms_reservation_id: int = Field(..., gt=0)
    guest_id: int | None = Field(None, gt=0)

    # Optional reservation metadata
    reservation_number: str | None = Field(None, max_length=100)
    reservation_date: date | None = None
    creation_time: datetime | None = None
    reservation_type: str | None = Field(None, max_length=50)
    booking_channel: str | None = Field(None, max_length=100)

    # Advertising/tracking data (used for matching)
    advertising_medium: str | None = Field(None, max_length=200)
    advertising_partner: str | None = Field(None, max_length=200)
    advertising_campagne: str | None = Field(None, max_length=500)

    # Attribution flags
    directly_attributable: bool = Field(default=False)
    guest_matched: bool = Field(default=False)

    # Timestamps (auto-managed)
    created_at: datetime = Field(default_factory=lambda: datetime.now(UTC))
    updated_at: datetime = Field(default_factory=lambda: datetime.now(UTC))

    @field_validator(
        "pms_reservation_id", "guest_id", "reservation_id", "customer_id",
        mode="before",
    )
    @classmethod
    def convert_int_fields(cls, v: Any, info: Any) -> int | None:
        """Convert integer fields from string to int (handles XML input).

        Empty strings and None become None (these fields are nullable).
        Fix: previously errors used a generic "field" label; pydantic v2
        passes a ValidationInfo object whose ``field_name`` identifies the
        actual field, so error messages now name the offending field.
        """
        if v is None or v == "":
            return None
        return convert_to_int(info.field_name, v)

    @field_validator(
        "hotel_id", "reservation_number", "reservation_type",
        "booking_channel", "advertising_medium", "advertising_partner",
        "advertising_campagne", mode="before",
    )
    @classmethod
    def strip_string_fields(cls, v: str | None) -> str | None:
        """Strip whitespace from string fields; blank values become None."""
        if v is None:
            return None
        stripped = str(v).strip()
        return stripped if stripped else None

    model_config = {"from_attributes": True}

View File

@@ -51,7 +51,6 @@ from alpine_bits_python.db import (
AckedRequest,
Base,
Customer,
HashedCustomer,
Reservation,
get_database_url,
)
@@ -306,7 +305,7 @@ async def migrate_data(
user_comment=reservation.user_comment,
fbclid=reservation.fbclid,
gclid=reservation.gclid,
hotel_code=reservation.hotel_code,
hotel_code=reservation.hotel_id,
hotel_name=reservation.hotel_name,
room_type_code=reservation.room_type_code,
room_classification_code=reservation.room_classification_code,

View File

@@ -203,7 +203,7 @@ async def process_wix_form_submission(
"name_title": None,
}
# This automatically creates/updates both Customer and HashedCustomer
# This automatically creates/updates Customer
db_customer = await customer_service.get_or_create_customer(customer_data)
# Determine hotel_code and hotel_name
@@ -247,7 +247,7 @@ async def process_wix_form_submission(
num_adults=num_adults,
num_children=num_children,
children_ages=children_ages,
hotel_code=hotel_code,
hotel_id=hotel_code,
hotel_name=hotel_name,
offer=offer,
created_at=submissionTime,
@@ -323,6 +323,8 @@ async def process_wix_form_submission(
"received_keys": list(data.keys()),
"timestamp": timestamp,
"note": "No authentication required for this endpoint",
"customer_id": db_customer.id,
"reservation_id": db_reservation.id,
}
@@ -573,7 +575,7 @@ async def process_generic_webhook_submission(
"num_adults": num_adults,
"num_children": num_children,
"children_ages": children_ages,
"hotel_code": hotel_code,
"hotel_id": hotel_code,
"hotel_name": hotel_name,
"offer": selected_offers_str,
"utm_source": utm_source,

View File

@@ -59,7 +59,7 @@ async def load_test_data_from_db():
result = []
for reservation, customer in reservations_with_customers:
# Get hashed customer data
hashed_customer = await customer_service.get_hashed_customer(customer.id)
hashed_customer = await customer_service.get_customer(customer.id)
result.append(
{

View File

@@ -9,6 +9,43 @@ from typing import Optional
from xml.etree import ElementTree as ET
def validate_and_convert_id(field_name: str, value: str | int) -> str:
"""Validate that an ID field is convertible to integer and return as string.
This helper ensures ID fields (like reservation_id, guest_id) are valid integers,
which is important since the Pydantic models will convert them from strings to ints.
Args:
field_name: Name of the field for error messages
value: The ID value (can be string or int)
Returns:
String representation of the validated integer ID
Raises:
ValueError: If value cannot be converted to a valid positive integer
"""
def _raise_invalid_type_error():
"""Raise error for invalid ID type."""
msg = (
f"{field_name} must be convertible to a positive integer, "
f"got: {value!r} (type: {type(value).__name__})"
)
raise ValueError(msg)
try:
# Convert to int first to validate it's a valid integer
int_value = int(value)
if int_value <= 0:
msg = f"{field_name} must be a positive integer, got: {value}"
raise ValueError(msg)
# Return as string for XML attributes
return str(int_value)
except (ValueError, TypeError):
_raise_invalid_type_error()
class RoomReservationBuilder:
"""Builder for creating roomReservation XML elements with daily sales."""
@@ -133,7 +170,7 @@ class ReservationXMLBuilder:
def __init__(
self,
hotel_id: str,
reservation_id: str,
reservation_id: str | int,
reservation_number: str,
reservation_date: str,
creation_time: Optional[str] = None,
@@ -146,7 +183,7 @@ class ReservationXMLBuilder:
Args:
hotel_id: Hotel ID
reservation_id: Reservation ID
reservation_id: Reservation ID (must be convertible to positive integer)
reservation_number: Reservation number
reservation_date: Reservation date in YYYY-MM-DD format
creation_time: Creation timestamp (defaults to reservation_date + T00:00:00)
@@ -156,7 +193,7 @@ class ReservationXMLBuilder:
advertising_campagne: Advertising campaign
"""
self.hotel_id = hotel_id
self.reservation_id = reservation_id
self.reservation_id = validate_and_convert_id("reservation_id", reservation_id)
self.reservation_number = reservation_number
self.reservation_date = reservation_date
self.creation_time = creation_time or f"{reservation_date}T00:00:00"
@@ -170,7 +207,7 @@ class ReservationXMLBuilder:
def set_guest(
self,
guest_id: str,
guest_id: str | int,
first_name: str,
last_name: str,
email: str,
@@ -182,7 +219,7 @@ class ReservationXMLBuilder:
"""Set guest information for the reservation.
Args:
guest_id: Guest ID
guest_id: Guest ID (must be convertible to positive integer)
first_name: Guest first name
last_name: Guest last name
email: Guest email
@@ -194,8 +231,9 @@ class ReservationXMLBuilder:
Returns:
Self for method chaining
"""
validated_guest_id = validate_and_convert_id("guest_id", guest_id)
self.guest_data = {
"id": guest_id,
"id": validated_guest_id,
"firstName": first_name,
"lastName": last_name,
"email": email,

View File

@@ -98,7 +98,7 @@ def sample_reservation(sample_customer):
user_comment="Late check-in requested",
fbclid="PAZXh0bgNhZW0BMABhZGlkAasmYBTNE3QBp1jWuJ9zIpfEGRJMP63fMAMI405yvG5EtH-OT0PxSkAbBJaudFHR6cMtkdHu_aem_fopaFtECyVPNW9fmWfEkyA",
gclid="",
hotel_code="HOTEL123",
hotel_id="HOTEL123",
hotel_name="Alpine Paradise Resort",
)
data = reservation.model_dump(exclude_none=True)
@@ -136,7 +136,7 @@ def minimal_reservation(minimal_customer):
num_adults=1,
num_children=0,
children_ages=[],
hotel_code="HOTEL123",
hotel_id="HOTEL123",
created_at=datetime(2024, 12, 2, 12, 0, 0, tzinfo=UTC),
hotel_name="Alpine Paradise Resort",
)
@@ -403,7 +403,7 @@ class TestEdgeCases:
num_adults=1,
num_children=0,
children_ages="",
hotel_code="HOTEL123",
hotel_id="HOTEL123",
created_at=datetime.now(UTC),
)
@@ -434,7 +434,7 @@ class TestEdgeCases:
num_adults=2,
num_children=0,
children_ages=[],
hotel_code="HOTEL123",
hotel_id="HOTEL123",
created_at=datetime.now(UTC),
utm_source="facebook",
utm_medium="social",
@@ -851,7 +851,7 @@ class TestAcknowledgments:
num_adults=2,
num_children=0,
children_ages=[],
hotel_code="HOTEL123",
hotel_id="HOTEL123",
hotel_name="Alpine Paradise Resort",
created_at=datetime(2024, 11, 1, 12, 0, 0, tzinfo=UTC),
)
@@ -863,7 +863,7 @@ class TestAcknowledgments:
num_adults=2,
num_children=1,
children_ages=[10],
hotel_code="HOTEL123",
hotel_id="HOTEL123",
hotel_name="Alpine Paradise Resort",
created_at=datetime(2024, 11, 15, 10, 0, 0, tzinfo=UTC),
)

View File

@@ -523,7 +523,7 @@ class TestGenericWebhookEndpoint:
(r for r in reservations if r.customer_id == customer.id), None
)
assert reservation is not None, "Reservation should be created"
assert reservation.hotel_code == "HOTEL123"
assert reservation.hotel_id == "HOTEL123"
assert reservation.hotel_name == "Test Hotel"
assert reservation.num_adults == 2
assert reservation.num_children == 1
@@ -614,7 +614,7 @@ class TestGenericWebhookEndpoint:
result = await session.execute(select(Reservation))
reservations = result.scalars().all()
reservation = next(
(r for r in reservations if r.hotel_code == "HOTEL123"), None
(r for r in reservations if r.hotel_id == "HOTEL123"), None
)
assert reservation is not None, "Reservation should be created"
assert reservation.num_children == 3

View File

@@ -17,6 +17,7 @@ from alpine_bits_python.alpinebits_server import AlpineBitsServer
from alpine_bits_python.api import app
from alpine_bits_python.const import HttpStatusCode
from alpine_bits_python.db import Base, Hotel, RoomAvailability
from alpine_bits_python.hotel_service import hash_password
def build_request_xml(body: str, include_unique_id: bool = True) -> str:
@@ -118,7 +119,7 @@ def seed_hotel_if_missing(client: TestClient):
hotel_id="HOTEL123",
hotel_name="Integration Hotel",
username="testuser",
password_hash="integration-hash",
password_hash=hash_password("testpass"),
created_at=datetime.now(UTC),
updated_at=datetime.now(UTC),
is_active=True,
@@ -156,7 +157,7 @@ def test_freerooms_endpoint_complete_set(freerooms_client: TestClient, freerooms
rows = fetch_availability(freerooms_client)
assert len(rows) == 3
assert rows[0].count_type_2 == 3
assert rows[0].bookable_type_2 == 3
def test_freerooms_endpoint_delta_updates_existing_rows(
@@ -181,7 +182,7 @@ def test_freerooms_endpoint_delta_updates_existing_rows(
assert response.status_code == HttpStatusCode.OK
rows = fetch_availability(freerooms_client)
counts = {row.date.isoformat(): row.count_type_2 for row in rows}
counts = {row.date.isoformat(): row.bookable_type_2 for row in rows}
assert counts["2025-10-02"] == 1
assert counts["2025-10-01"] == 3

View File

@@ -20,6 +20,7 @@ import pytest
import pytest_asyncio
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.orm import selectinload
from alpine_bits_python.conversion_service import ConversionService
from alpine_bits_python.csv_import import CSVImporter
@@ -29,7 +30,6 @@ from alpine_bits_python.db import (
ConversionGuest,
ConversionRoom,
Customer,
HashedCustomer,
Reservation,
)
@@ -130,9 +130,9 @@ class TestConversionServiceWithImportedData:
print(f"\nCSV Import Stats: {csv_stats}")
assert csv_stats["total_rows"] > 0, "CSV import should have processed rows"
assert (
csv_stats["created_reservations"] > 0
), "CSV import should create reservations"
assert csv_stats["created_reservations"] > 0, (
"CSV import should create reservations"
)
# Step 2: Load and process conversion XML
with xml_file.open(encoding="utf-8") as f:
@@ -143,7 +143,7 @@ class TestConversionServiceWithImportedData:
## Need to check if reservations and customers are now actually available in the db before proceeding
conversion_service = ConversionService(test_db_session)
conversion_service = ConversionService(test_db_session, hotel_id="39054_001")
stats = await conversion_service.process_conversion_xml(xml_content)
# BASELINE ASSERTIONS:
@@ -166,30 +166,36 @@ class TestConversionServiceWithImportedData:
EXPECTED_MATCHED_TO_CUSTOMER = 0
print(f"\nBaseline Match Counts:")
print("\nBaseline Match Counts:")
print(f" Total reservations in XML: {EXPECTED_TOTAL_RESERVATIONS}")
print(f" Total daily sales records: {EXPECTED_TOTAL_DAILY_SALES}")
print(f" Total conversion room records: {EXPECTED_TOTAL_ROOMS}")
print(f" Matched to reservation: {EXPECTED_MATCHED_TO_RESERVATION}")
match_rate = (EXPECTED_MATCHED_TO_RESERVATION / EXPECTED_TOTAL_RESERVATIONS * 100) if EXPECTED_TOTAL_RESERVATIONS > 0 else 0
match_rate = (
(EXPECTED_MATCHED_TO_RESERVATION / EXPECTED_TOTAL_RESERVATIONS * 100)
if EXPECTED_TOTAL_RESERVATIONS > 0
else 0
)
print(f" Match rate: {match_rate:.1f}%")
print(f" Matched to customer: {EXPECTED_MATCHED_TO_CUSTOMER}")
print(f" Match rate (to customer): {(EXPECTED_MATCHED_TO_CUSTOMER / EXPECTED_TOTAL_RESERVATIONS * 100) if EXPECTED_TOTAL_RESERVATIONS > 0 else 0:.1f}%")
print(
f" Match rate (to customer): {(EXPECTED_MATCHED_TO_CUSTOMER / EXPECTED_TOTAL_RESERVATIONS * 100) if EXPECTED_TOTAL_RESERVATIONS > 0 else 0:.1f}%"
)
# Verify baseline stability on subsequent runs
assert (
stats["total_reservations"] == EXPECTED_TOTAL_RESERVATIONS
), f"Total reservations should be {EXPECTED_TOTAL_RESERVATIONS}, got {stats['total_reservations']}"
assert (
stats["total_daily_sales"] == EXPECTED_TOTAL_DAILY_SALES
), f"Total daily sales should be {EXPECTED_TOTAL_DAILY_SALES}, got {stats['total_daily_sales']}"
assert (
stats["matched_to_reservation"] == EXPECTED_MATCHED_TO_RESERVATION
), f"Matched reservations should be {EXPECTED_MATCHED_TO_RESERVATION}, got {stats['matched_to_reservation']}"
assert stats["total_reservations"] == EXPECTED_TOTAL_RESERVATIONS, (
f"Total reservations should be {EXPECTED_TOTAL_RESERVATIONS}, got {stats['total_reservations']}"
)
assert stats["total_daily_sales"] == EXPECTED_TOTAL_DAILY_SALES, (
f"Total daily sales should be {EXPECTED_TOTAL_DAILY_SALES}, got {stats['total_daily_sales']}"
)
assert stats["matched_to_reservation"] == EXPECTED_MATCHED_TO_RESERVATION, (
f"Matched reservations should be {EXPECTED_MATCHED_TO_RESERVATION}, got {stats['matched_to_reservation']}"
)
assert (
stats["matched_to_customer"] == EXPECTED_MATCHED_TO_CUSTOMER
), f"Matched customers should be {EXPECTED_MATCHED_TO_CUSTOMER}, got {stats['matched_to_customer']}"
assert stats["matched_to_customer"] == EXPECTED_MATCHED_TO_CUSTOMER, (
f"Matched customers should be {EXPECTED_MATCHED_TO_CUSTOMER}, got {stats['matched_to_customer']}"
)
@pytest.mark.asyncio
async def test_conversion_room_revenue_aggregation(
@@ -219,7 +225,7 @@ class TestConversionServiceWithImportedData:
# File already has proper XML structure, just use it as-is
xml_content = xml_content.strip()
conversion_service = ConversionService(test_db_session)
conversion_service = ConversionService(test_db_session, hotel_id="39054_001")
stats = await conversion_service.process_conversion_xml(xml_content)
# Verify conversions were created
@@ -237,23 +243,25 @@ class TestConversionServiceWithImportedData:
# Note: Test data may not have revenue values in the XML
# The important thing is that we're capturing room-level data
print(f"\nRevenue Aggregation Stats:")
print("\nRevenue Aggregation Stats:")
print(f" Total conversion rooms: {len(all_rooms)}")
print(f" Rooms with revenue: {len(rooms_with_revenue)}")
if rooms_with_revenue:
# Verify revenue values are numeric and positive
for room in rooms_with_revenue:
assert isinstance(
room.total_revenue, (int, float)
), f"Revenue should be numeric, got {type(room.total_revenue)}"
assert (
room.total_revenue > 0
), f"Revenue should be positive, got {room.total_revenue}"
assert isinstance(room.total_revenue, (int, float)), (
f"Revenue should be numeric, got {type(room.total_revenue)}"
)
assert room.total_revenue > 0, (
f"Revenue should be positive, got {room.total_revenue}"
)
total_revenue = sum(room.total_revenue for room in rooms_with_revenue)
print(f" Total aggregated revenue: {total_revenue}")
print(f" Average revenue per room: {total_revenue / len(rooms_with_revenue)}")
print(
f" Average revenue per room: {total_revenue / len(rooms_with_revenue)}"
)
@pytest.mark.asyncio
async def test_conversion_matching_by_guest_details(
@@ -282,7 +290,9 @@ class TestConversionServiceWithImportedData:
dryrun=False,
)
assert csv_stats["created_reservations"] > 0, "Should have imported reservations"
assert csv_stats["created_reservations"] > 0, (
"Should have imported reservations"
)
# Process conversions
with xml_file.open(encoding="utf-8") as f:
@@ -291,7 +301,7 @@ class TestConversionServiceWithImportedData:
# File already has proper XML structure, just use it as-is
xml_content = xml_content.strip()
conversion_service = ConversionService(test_db_session)
conversion_service = ConversionService(test_db_session, hotel_id="39054_001")
stats = await conversion_service.process_conversion_xml(xml_content)
# Verify conversions were processed
@@ -307,14 +317,14 @@ class TestConversionServiceWithImportedData:
)
conversions_with_customers = result.scalars().all()
print(f"\nGuest Detail Matching:")
print("\nGuest Detail Matching:")
print(f" Total conversions: {len(all_conversions)}")
print(f" Conversions matched to customer: {len(conversions_with_customers)}")
print(f" Stats matched_to_customer: {stats['matched_to_customer']}")
# With this test data, matches may be 0 if guest names/emails don't align
# The important thing is that the matching logic runs without errors
print(f" Note: Matches depend on data alignment between CSV and XML files")
print(" Note: Matches depend on data alignment between CSV and XML files")
@pytest.mark.asyncio
async def test_conversion_service_error_handling(
@@ -323,7 +333,7 @@ class TestConversionServiceWithImportedData:
"""Test ConversionService handles invalid XML gracefully."""
invalid_xml = "<invalid>unclosed tag"
conversion_service = ConversionService(test_db_session)
conversion_service = ConversionService(test_db_session, hotel_id="39054_001")
with pytest.raises(ValueError, match="Invalid XML"):
await conversion_service.process_conversion_xml(invalid_xml)
@@ -333,7 +343,7 @@ class TestConversionServiceWithImportedData:
"""Test ConversionService handles empty/minimal XML."""
minimal_xml = '<?xml version="1.0"?><root></root>'
conversion_service = ConversionService(test_db_session)
conversion_service = ConversionService(test_db_session, hotel_id="39054_001")
stats = await conversion_service.process_conversion_xml(minimal_xml)
assert stats["total_reservations"] == 0
@@ -354,7 +364,7 @@ class TestConversionServiceWithImportedData:
with room_number='201', second has status='request' with room_number='202'
4. The old room entries (101, 102) should no longer exist in the database
"""
from tests.helpers import ReservationXMLBuilder, MultiReservationXMLBuilder
from tests.helpers import MultiReservationXMLBuilder, ReservationXMLBuilder
# First batch: Process two reservations
multi_builder1 = MultiReservationXMLBuilder()
@@ -363,13 +373,13 @@ class TestConversionServiceWithImportedData:
res1_v1 = (
ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="res_001",
reservation_number="RES-001",
reservation_id="100",
reservation_number="100",
reservation_date="2025-11-14",
reservation_type="request",
)
.set_guest(
guest_id="guest_001",
guest_id="100",
first_name="Alice",
last_name="Johnson",
email="alice@example.com",
@@ -388,13 +398,13 @@ class TestConversionServiceWithImportedData:
res2_v1 = (
ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="res_002",
reservation_number="RES-002",
reservation_id="101",
reservation_number="101",
reservation_date="2025-11-15",
reservation_type="reservation",
)
.set_guest(
guest_id="guest_002",
guest_id="101",
first_name="Bob",
last_name="Smith",
email="bob@example.com",
@@ -412,7 +422,7 @@ class TestConversionServiceWithImportedData:
xml_content1 = multi_builder1.build_xml()
# Process first batch
service = ConversionService(test_db_session)
service = ConversionService(test_db_session, hotel_id="39054_001")
stats1 = await service.process_conversion_xml(xml_content1)
assert stats1["total_reservations"] == 2
@@ -437,13 +447,13 @@ class TestConversionServiceWithImportedData:
res1_v2 = (
ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="res_001", # Same ID
reservation_number="RES-001", # Same number
reservation_id="100", # Same ID
reservation_number="100", # Same number
reservation_date="2025-11-14",
reservation_type="reservation", # Changed from request
)
.set_guest(
guest_id="guest_001",
guest_id="100",
first_name="Alice",
last_name="Johnson",
email="alice@example.com",
@@ -462,13 +472,13 @@ class TestConversionServiceWithImportedData:
res2_v2 = (
ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="res_002", # Same ID
reservation_number="RES-002", # Same number
reservation_id="101", # Same ID
reservation_number="101", # Same number
reservation_date="2025-11-15",
reservation_type="request", # Changed from reservation
)
.set_guest(
guest_id="guest_002",
guest_id="101",
first_name="Bob",
last_name="Smith",
email="bob@example.com",
@@ -533,6 +543,186 @@ class TestConversionServiceWithImportedData:
)
class TestConversionUpdatesAndMatching:
"""Tests covering conversion updates and core matching logic."""
@pytest.mark.asyncio
async def test_reprocessing_conversion_updates_metadata(self, test_db_session):
"""Ensure reprocessing a reservation updates metadata instead of duplicating."""
def build_xml(
*,
booking_channel: str,
advertising_medium: str,
advertising_partner: str,
room_number: str,
arrival: str,
departure: str,
revenue: float,
) -> str:
return f"""<?xml version="1.0"?>
<root>
<reservation id="2001" hotelID="39054_001" number="A-1" date="2025-01-05"
bookingChannel="{booking_channel}"
advertisingMedium="{advertising_medium}"
advertisingPartner="{advertising_partner}"
advertisingCampagne="abc123">
<guest id="900" firstName="Casey" lastName="Jordan" email="casey@example.com"/>
<roomReservations>
<roomReservation roomNumber="{room_number}" arrival="{arrival}" departure="{departure}" status="reserved">
<dailySales>
<dailySale date="{arrival}" revenueTotal="{revenue}"/>
<dailySale date="{departure}" revenueTotal="{revenue}"/>
</dailySales>
</roomReservation>
</roomReservations>
</reservation>
</root>"""
first_xml = build_xml(
booking_channel="OTA",
advertising_medium="META",
advertising_partner="cpc",
room_number="33",
arrival="2025-02-01",
departure="2025-02-03",
revenue=120.0,
)
service = ConversionService(test_db_session, hotel_id="39054_001")
stats_first = await service.process_conversion_xml(first_xml)
assert stats_first["total_reservations"] == 1
result = await test_db_session.execute(
select(Conversion)
.where(
Conversion.hotel_id == "39054_001",
Conversion.pms_reservation_id == 2001,
)
.options(selectinload(Conversion.conversion_rooms))
)
conversion = result.scalar_one()
assert conversion.booking_channel == "OTA"
assert conversion.advertising_partner == "cpc"
original_room_count = len(conversion.conversion_rooms)
assert original_room_count == 1
assert conversion.conversion_rooms[0].room_number == "33"
updated_xml = build_xml(
booking_channel="DIRECT",
advertising_medium="WEBSITE",
advertising_partner="organic",
room_number="44",
arrival="2025-02-02",
departure="2025-02-04",
revenue=150.0,
)
stats_second = await service.process_conversion_xml(updated_xml)
assert stats_second["total_reservations"] == 1
test_db_session.expire_all()
result = await test_db_session.execute(
select(Conversion)
.where(
Conversion.hotel_id == "39054_001",
Conversion.pms_reservation_id == 2001,
)
.options(selectinload(Conversion.conversion_rooms))
)
updated_conversion = result.scalar_one()
assert updated_conversion.booking_channel == "DIRECT"
assert updated_conversion.advertising_medium == "WEBSITE"
assert updated_conversion.advertising_partner == "organic"
assert len(updated_conversion.conversion_rooms) == 1
assert updated_conversion.conversion_rooms[0].room_number == "44"
assert updated_conversion.conversion_rooms[0].arrival_date.strftime(
"%Y-%m-%d"
) == "2025-02-02"
@pytest.mark.asyncio
async def test_advertising_match_uses_hashed_email_for_disambiguation(
self, test_db_session
):
"""Ensure hashed email filters ambiguous advertising matches."""
# Create two customers/reservations sharing the same click-id prefix
customer_a = Customer(
given_name="Lara",
surname="North",
email_address="lara@example.com",
contact_id="contact_a",
)
customer_a.update_hashed_fields()
customer_b = Customer(
given_name="Mia",
surname="West",
email_address="mia@example.com",
contact_id="contact_b",
)
customer_b.update_hashed_fields()
test_db_session.add_all([customer_a, customer_b])
await test_db_session.flush()
reservation_a = Reservation(
customer_id=customer_a.id,
unique_id="res_a",
md5_unique_id="A" * 32,
hotel_id="39054_001",
fbclid="click-prefix-111",
)
reservation_b = Reservation(
customer_id=customer_b.id,
unique_id="res_b",
md5_unique_id="B" * 32,
hotel_id="39054_001",
fbclid="click-prefix-222",
)
test_db_session.add_all([reservation_a, reservation_b])
await test_db_session.commit()
from tests.helpers import ReservationXMLBuilder
xml_content = (
ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="3001",
reservation_number="B-1",
reservation_date="2025-03-10",
advertising_campagne="click-prefix",
)
.set_guest(
guest_id="701",
first_name="Mia",
last_name="West",
email="mia@example.com",
)
.add_room(
arrival="2025-04-01",
departure="2025-04-03",
room_number="55",
status="reserved",
revenue_logis_per_day=180.0,
)
.build_xml()
)
service = ConversionService(test_db_session, hotel_id="39054_001")
stats = await service.process_conversion_xml(xml_content)
result = await test_db_session.execute(
select(Conversion)
.where(
Conversion.hotel_id == "39054_001",
Conversion.pms_reservation_id == 3001,
)
.options(selectinload(Conversion.guest))
)
conversion = result.scalar_one()
assert conversion.reservation_id == reservation_b.id
assert conversion.customer_id == customer_b.id
assert stats["matched_to_reservation"] == 1
assert stats["matched_to_customer"] == 0
class TestXMLBuilderUsage:
"""Demonstrate usage of XML builder helpers for creating test data."""
@@ -546,12 +736,12 @@ class TestXMLBuilderUsage:
xml_content = (
ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="test_123",
reservation_number="RES-123",
reservation_id="123",
reservation_number="123",
reservation_date="2025-11-14",
)
.set_guest(
guest_id="guest_001",
guest_id="157",
first_name="John",
last_name="Doe",
email="john@example.com",
@@ -563,34 +753,32 @@ class TestXMLBuilderUsage:
room_type="DZV",
room_number="101",
revenue_logis_per_day=150.0,
adults=2
adults=2,
)
.build_xml()
)
# Process the XML
service = ConversionService(test_db_session)
service = ConversionService(test_db_session, hotel_id="39054_001")
stats = await service.process_conversion_xml(xml_content)
assert stats["total_reservations"] == 1
assert stats["total_daily_sales"] == 5 # 4 nights + departure day
@pytest.mark.asyncio
async def test_using_xml_builder_for_multi_room_reservation(
self, test_db_session
):
async def test_using_xml_builder_for_multi_room_reservation(self, test_db_session):
"""Example: Create a reservation with multiple rooms."""
from tests.helpers import ReservationXMLBuilder
xml_content = (
ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="test_456",
reservation_number="RES-456",
reservation_id="456",
reservation_number="456",
reservation_date="2025-11-14",
)
.set_guest(
guest_id="guest_002",
guest_id="157",
first_name="Jane",
last_name="Smith",
email="jane@example.com",
@@ -610,7 +798,7 @@ class TestXMLBuilderUsage:
.build_xml()
)
service = ConversionService(test_db_session)
service = ConversionService(test_db_session, hotel_id="39054_001")
stats = await service.process_conversion_xml(xml_content)
assert stats["total_reservations"] == 1
@@ -620,7 +808,7 @@ class TestXMLBuilderUsage:
@pytest.mark.asyncio
async def test_using_multi_reservation_builder(self, test_db_session):
"""Example: Create multiple reservations in one XML document."""
from tests.helpers import ReservationXMLBuilder, MultiReservationXMLBuilder
from tests.helpers import MultiReservationXMLBuilder, ReservationXMLBuilder
multi_builder = MultiReservationXMLBuilder()
@@ -628,12 +816,12 @@ class TestXMLBuilderUsage:
res1 = (
ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="test_001",
reservation_number="RES-001",
reservation_id="175",
reservation_number="175",
reservation_date="2025-11-14",
)
.set_guest(
guest_id="guest_001",
guest_id="157",
first_name="Alice",
last_name="Johnson",
email="alice@example.com",
@@ -650,12 +838,12 @@ class TestXMLBuilderUsage:
res2 = (
ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="test_002",
reservation_id="2725",
reservation_number="RES-002",
reservation_date="2025-11-15",
)
.set_guest(
guest_id="guest_002",
guest_id="2525",
first_name="Bob",
last_name="Williams",
email="bob@example.com",
@@ -671,7 +859,7 @@ class TestXMLBuilderUsage:
xml_content = multi_builder.build_xml()
# Process the XML
service = ConversionService(test_db_session)
service = ConversionService(test_db_session, hotel_id="39054_001")
stats = await service.process_conversion_xml(xml_content)
assert stats["total_reservations"] == 2
@@ -683,14 +871,12 @@ class TestHashedMatchingLogic:
"""Test the hashed matching logic used in ConversionService."""
@pytest.mark.asyncio
async def test_conversion_guest_hashed_fields_are_populated(
self, test_db_session
):
async def test_conversion_guest_hashed_fields_are_populated(self, test_db_session):
"""Test that ConversionGuest properly stores hashed versions of guest data."""
# Create a conversion guest
conversion_guest = ConversionGuest.create_from_conversion_data(
hotel_id="test_hotel",
guest_id="guest_123",
guest_id=123,
guest_first_name="Margaret",
guest_last_name="Brown",
guest_email="margaret@example.com",
@@ -721,7 +907,6 @@ class TestHashedMatchingLogic:
assert conversion_guest.hashed_last_name == expected_hashed_last
assert conversion_guest.hashed_email == expected_hashed_email
@pytest.mark.asyncio
async def test_conversion_records_created_before_matching(
self, test_db_session, test_config
@@ -737,23 +922,24 @@ class TestHashedMatchingLogic:
test_db_session.add(customer)
await test_db_session.flush()
hashed_customer = customer.create_hashed_customer()
test_db_session.add(hashed_customer)
await test_db_session.flush()
customer.update_hashed_fields()
reservation = Reservation(
customer_id=customer.id,
unique_id="res_6",
hotel_code="hotel_1",
hotel_id="hotel_1",
)
test_db_session.add(reservation)
await test_db_session.commit()
PMS_RESERVATION_ID = 157
# Create conversion XML with matching hashed data
xml_content = f"""<?xml version="1.0"?>
<root>
<reservation id="pms_123" hotelID="hotel_1" number="RES001" date="2025-01-15">
<guest id="guest_001" firstName="David" lastName="Miller" email="david@example.com"/>
<reservation id="{PMS_RESERVATION_ID}" hotelID="hotel_1" number="378" date="2025-01-15">
<guest id="123" firstName="David" lastName="Miller" email="david@example.com"/>
<roomReservations>
<roomReservation roomNumber="101" arrival="2025-01-15" departure="2025-01-17" status="confirmed">
<dailySales>
@@ -764,12 +950,14 @@ class TestHashedMatchingLogic:
</reservation>
</root>"""
service = ConversionService(test_db_session)
service = ConversionService(test_db_session, hotel_id="39054_001")
stats = await service.process_conversion_xml(xml_content)
# Verify conversion was created
result = await test_db_session.execute(
select(Conversion).where(Conversion.pms_reservation_id == "pms_123")
select(Conversion).where(
Conversion.pms_reservation_id == PMS_RESERVATION_ID
)
)
conversion = result.scalar_one_or_none()
@@ -779,32 +967,166 @@ class TestHashedMatchingLogic:
# Verify conversion_guest was created with the correct data
from sqlalchemy.orm import selectinload
result_with_guest = await test_db_session.execute(
select(Conversion)
.where(Conversion.pms_reservation_id == "pms_123")
.where(Conversion.pms_reservation_id == PMS_RESERVATION_ID)
.options(selectinload(Conversion.guest))
)
conversion_with_guest = result_with_guest.scalar_one_or_none()
assert conversion_with_guest.guest is not None, "ConversionGuest relationship should exist"
assert conversion_with_guest.guest is not None, (
"ConversionGuest relationship should exist"
)
assert conversion_with_guest.guest.guest_first_name == "David"
assert conversion_with_guest.guest.guest_last_name == "Miller"
assert conversion_with_guest.guest.guest_email == "david@example.com"
# Verify conversion_room was created
room_result = await test_db_session.execute(
select(ConversionRoom).where(
ConversionRoom.conversion_id == conversion.id
class TestRegularGuestClassification:
"""Tests for the classify_regular_guests helper."""
@pytest.mark.asyncio
async def test_classify_regular_guest_with_unattributable_history(
self, test_db_session
):
"""Guests with unattributable paying stays become regulars."""
from tests.helpers import MultiReservationXMLBuilder, ReservationXMLBuilder
multi = MultiReservationXMLBuilder()
base_builder = ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="4001",
reservation_number="REG-1",
reservation_date="2025-05-01",
).set_guest(
guest_id="888",
first_name="Regular",
last_name="Guest",
email="regular@example.com",
)
base_builder.add_room(
arrival="2025-06-01",
departure="2025-06-03",
room_number="71",
status="departed",
revenue_logis_per_day=220.0,
)
multi.add_reservation(base_builder)
second = ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="4002",
reservation_number="REG-2",
reservation_date="2025-05-10",
).set_guest(
guest_id="888",
first_name="Regular",
last_name="Guest",
email="regular@example.com",
)
second.add_room(
arrival="2025-07-01",
departure="2025-07-04",
room_number="72",
status="departed",
revenue_logis_per_day=210.0,
)
multi.add_reservation(second)
service = ConversionService(test_db_session, hotel_id="39054_001")
await service.process_conversion_xml(multi.build_xml())
stats = await service.classify_regular_guests(updated_within_hours=None)
assert stats["regular"] == 1
guest = await test_db_session.execute(
select(ConversionGuest).where(
ConversionGuest.hotel_id == "39054_001",
ConversionGuest.guest_id == 888,
)
)
rooms = room_result.scalars().all()
assert len(rooms) > 0, "ConversionRoom should be created"
guest_record = guest.scalar_one()
assert guest_record.is_regular is True
assert guest_record.is_awareness_guest is False
# Verify matching occurred (may or may not have matched depending on data)
# The important thing is that the records exist
assert stats["total_reservations"] == 1
assert stats["total_daily_sales"] == 1
@pytest.mark.asyncio
async def test_classify_awareness_guest_when_first_stay_attributable(
self, test_db_session
):
"""If the earliest paying stay is attributable, mark awareness guests."""
from tests.helpers import MultiReservationXMLBuilder, ReservationXMLBuilder
multi = MultiReservationXMLBuilder()
first = ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="4101",
reservation_number="AW-1",
reservation_date="2025-08-01",
).set_guest(
guest_id="889",
first_name="Aware",
last_name="Guest",
email="aware@example.com",
)
first.add_room(
arrival="2025-09-01",
departure="2025-09-03",
room_number="81",
status="departed",
revenue_logis_per_day=250.0,
)
multi.add_reservation(first)
second = ReservationXMLBuilder(
hotel_id="39054_001",
reservation_id="4102",
reservation_number="AW-2",
reservation_date="2025-08-10",
).set_guest(
guest_id="889",
first_name="Aware",
last_name="Guest",
email="aware@example.com",
)
second.add_room(
arrival="2025-10-05",
departure="2025-10-08",
room_number="82",
status="departed",
revenue_logis_per_day=260.0,
)
multi.add_reservation(second)
service = ConversionService(test_db_session, hotel_id="39054_001")
await service.process_conversion_xml(multi.build_xml())
# Mark earliest stay as attributable to simulate campaign match
result = await test_db_session.execute(
select(Conversion)
.where(
Conversion.hotel_id == "39054_001",
Conversion.guest_id == 889,
)
.order_by(Conversion.reservation_date.asc())
)
conversions = result.scalars().all()
conversions[0].directly_attributable = True
conversions[1].directly_attributable = False
await test_db_session.commit()
stats = await service.classify_regular_guests(updated_within_hours=None)
assert stats["regular"] == 1
assert stats["awareness"] == 1
guest = await test_db_session.execute(
select(ConversionGuest).where(
ConversionGuest.hotel_id == "39054_001",
ConversionGuest.guest_id == 889,
)
)
guest_record = guest.scalar_one()
assert guest_record.is_regular is True
assert guest_record.is_awareness_guest is True
@pytest.mark.asyncio
async def test_conversion_guest_composite_key_prevents_duplicates(
@@ -819,7 +1141,7 @@ class TestHashedMatchingLogic:
Now the database itself enforces uniqueness at the PK level.
"""
hotel_id = "test_hotel"
guest_id = "guest_123"
guest_id = 123
# Create and commit first conversion guest
guest1 = ConversionGuest.create_from_conversion_data(
@@ -862,6 +1184,7 @@ class TestHashedMatchingLogic:
# The composite PK constraint prevents the duplicate insert
from sqlalchemy.exc import IntegrityError
with pytest.raises(IntegrityError):
await test_db_session.commit()

View File

@@ -6,7 +6,7 @@ from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from alpine_bits_python.customer_service import CustomerService
from alpine_bits_python.db import Base, Customer, HashedCustomer
from alpine_bits_python.db import Base, Customer
@pytest_asyncio.fixture
@@ -42,9 +42,9 @@ async def test_create_customer_creates_hashed_version(async_session: AsyncSessio
assert customer.given_name == "John"
# Check that hashed version was created
hashed = await service.get_hashed_customer(customer.id)
hashed = await service.get_customer(customer.id)
assert hashed is not None
assert hashed.customer_id == customer.id
assert hashed.id == customer.id
assert hashed.hashed_email is not None
assert hashed.hashed_phone is not None
assert hashed.hashed_given_name is not None
@@ -66,7 +66,7 @@ async def test_update_customer_updates_hashed_version(async_session: AsyncSessio
customer = await service.create_customer(customer_data)
# Get initial hashed email
hashed = await service.get_hashed_customer(customer.id)
hashed = await service.get_customer(customer.id)
original_hashed_email = hashed.hashed_email
# Update customer email
@@ -74,7 +74,7 @@ async def test_update_customer_updates_hashed_version(async_session: AsyncSessio
updated_customer = await service.update_customer(customer, update_data)
# Check that hashed version was updated
updated_hashed = await service.get_hashed_customer(updated_customer.id)
updated_hashed = await service.get_customer(updated_customer.id)
assert updated_hashed.hashed_email != original_hashed_email
@@ -95,7 +95,7 @@ async def test_get_or_create_customer_creates_new(async_session: AsyncSession):
assert customer.contact_id == "new123"
# Verify hashed version exists
hashed = await service.get_hashed_customer(customer.id)
hashed = await service.get_customer(customer.id)
assert hashed is not None
@@ -145,10 +145,13 @@ async def test_hash_existing_customers_backfills(async_session: AsyncSession):
# Verify no hashed version exists
result = await async_session.execute(
select(HashedCustomer).where(HashedCustomer.customer_id == customer.id)
select(Customer).where(Customer.id == customer.id)
)
hashed = result.scalar_one_or_none()
assert hashed is None
assert hashed, "Customer should exist."
assert hashed.hashed_given_name is None, "Hashed given name should be None."
assert hashed.hashed_email is None, "Hashed email should be None."
# Run backfill
service = CustomerService(async_session)
@@ -158,11 +161,12 @@ async def test_hash_existing_customers_backfills(async_session: AsyncSession):
# Verify hashed version now exists
result = await async_session.execute(
select(HashedCustomer).where(HashedCustomer.customer_id == customer.id)
select(Customer).where(Customer.id == customer.id)
)
hashed = result.scalar_one_or_none()
assert hashed is not None
assert hashed.hashed_email is not None
assert hashed is not None, "Customer should still exist after backfill."
assert hashed.hashed_email is not None, "Hashed email should be populated."
assert hashed.hashed_given_name is not None, "Hashed given name should be populated."
@pytest.mark.asyncio
@@ -201,7 +205,7 @@ async def test_hashing_normalization(async_session: AsyncSession):
}
customer = await service.create_customer(customer_data)
hashed = await service.get_hashed_customer(customer.id)
hashed = await service.get_customer(customer.id)
# Verify hashes exist (normalization should have occurred)
assert hashed.hashed_email is not None
@@ -244,13 +248,17 @@ async def test_hash_existing_customers_normalizes_country_code(
# Verify no hashed version exists yet
result = await async_session.execute(
select(HashedCustomer).where(HashedCustomer.customer_id == customer.id)
select(Customer).where(Customer.id == customer.id)
)
hashed = result.scalar_one_or_none()
assert hashed is None
assert hashed is not None, "Customer should exist."
assert hashed.hashed_given_name is None, "Hashed given name should be None."
assert hashed.hashed_email is None, "Hashed email should be None."
assert hashed.hashed_country_code is None, "Hashed country code should be None."
# Verify the customer has the invalid country code stored in the DB
assert customer.country_code == "Italy"
assert hashed.country_code == "Italy"
# Run hash_existing_customers - this should normalize "Italy" to "IT"
# during validation and successfully create a hashed customer
@@ -263,7 +271,7 @@ async def test_hash_existing_customers_normalizes_country_code(
# Verify hashed version was created
await async_session.refresh(customer)
result = await async_session.execute(
select(HashedCustomer).where(HashedCustomer.customer_id == customer.id)
select(Customer).where(Customer.id == customer.id)
)
hashed = result.scalar_one_or_none()
assert hashed is not None
@@ -302,7 +310,7 @@ async def test_hash_existing_customers_normalizes_country_code(
# Verify hashed version was created with correct hash
result = await async_session.execute(
select(HashedCustomer).where(HashedCustomer.customer_id == customer2.id)
select(Customer).where(Customer.id == customer2.id)
)
hashed = result.scalar_one_or_none()
assert hashed is not None

View File

@@ -0,0 +1,549 @@
<?xml version="1.0" ?>
<!--
Bespielfile von Sebastian zum testen der Closing Seasons Funktionalität
-->
<OTA_HotelInvCountNotifRQ xmlns='http://www.opentravel.org/OTA/2003/05' Version='3.000' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xsi:schemaLocation='http://www.opentravel.org/OTA/2003/05 OTA_HotelInvCountNotifRQ.xsd'>
<UniqueID Type='16' ID='1' Instance='CompleteSet'/>
<Inventories HotelCode='TESTHOTEL'>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' AllInvCode='1'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='106' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='106' InvTypeCode='EZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='106' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-09' InvCode='107' InvTypeCode='EZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-10' End='2025-12-19' InvCode='107' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='107' InvTypeCode='EZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2025-12-28' InvCode='107' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-29' End='2026-01-04' InvCode='107' InvTypeCode='EZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2026-01-05' End='2026-01-31' InvCode='107' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='108' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='108' InvTypeCode='EZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='108' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='206' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='206' InvTypeCode='EZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='206' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='207' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='207' InvTypeCode='EZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='207' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='208' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='208' InvTypeCode='EZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='208' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='306' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='306' InvTypeCode='EZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='306' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='307' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='307' InvTypeCode='EZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='307' InvTypeCode='EZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='101' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='101' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-31' InvCode='101' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2026-01-31' InvCode='102' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='103' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='103' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-31' InvCode='103' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='104' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='104' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-04' InvCode='104' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2026-01-05' End='2026-01-05' InvCode='104' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2026-01-06' End='2026-01-31' InvCode='104' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='105' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='105' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-31' InvCode='105' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='201' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='201' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-31' InvCode='201' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='202' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='202' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-31' InvCode='202' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='203' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='203' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-31' InvCode='203' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='204' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='204' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-31' InvCode='204' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='205' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2026-01-05' InvCode='205' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2026-01-06' End='2026-01-31' InvCode='205' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='301' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='301' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-31' InvCode='301' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='302' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='302' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-31' InvCode='302' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='303' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='303' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-31' InvCode='303' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='304' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-25' InvCode='304' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2026-01-31' InvCode='304' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='305' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='305' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='305' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='501' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='501' InvTypeCode='DZ'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='501' InvTypeCode='DZ'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-11' InvCode='109' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-12' End='2025-12-24' InvCode='109' InvTypeCode='SUI'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-25' End='2025-12-25' InvCode='109' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-26' End='2025-12-26' InvCode='109' InvTypeCode='SUI'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-27' End='2026-01-13' InvCode='109' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2026-01-14' End='2026-01-14' InvCode='109' InvTypeCode='SUI'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2026-01-15' End='2026-01-31' InvCode='109' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-16' InvCode='110' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-17' End='2025-12-23' InvCode='110' InvTypeCode='SUI'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='110' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-17' InvCode='209' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-18' End='2025-12-23' InvCode='209' InvTypeCode='SUI'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='209' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='210' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='210' InvTypeCode='SUI'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='210' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='309' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='309' InvTypeCode='SUI'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='309' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='310' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='310' InvTypeCode='SUI'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='310' InvTypeCode='SUI'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='401' InvTypeCode='FW'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='401' InvTypeCode='FW'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='401' InvTypeCode='FW'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='402' InvTypeCode='FW'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='402' InvTypeCode='FW'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='402' InvTypeCode='FW'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='403' InvTypeCode='FW'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='403' InvTypeCode='FW'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='403' InvTypeCode='FW'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-09' End='2025-12-19' InvCode='308' InvTypeCode='COD'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-20' End='2025-12-23' InvCode='308' InvTypeCode='COD'/>
</Inventory>
<Inventory>
<StatusApplicationControl Start='2025-12-24' End='2026-01-31' InvCode='308' InvTypeCode='COD'/>
<InvCounts>
<InvCount CountType='2' Count='1'/>
</InvCounts>
</Inventory>
</Inventories>
</OTA_HotelInvCountNotifRQ>

View File

@@ -1,53 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
AlpineBits 2024-10
https://www.alpinebits.org/
Sample message file for a Handshake response
Changelog:
v. 2024-10 1.2 Example extended with all capabilities and two supported releases
v. 2024-10 1.1 Removed the OTA_Ping action
v. 2024-10 1.0 added supported version 2024-10 in the example
v. 2018-10 1.0 initial example
-->
<OTA_PingRS xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://www.opentravel.org/OTA/2003/05"
xsi:schemaLocation="http://www.opentravel.org/OTA/2003/05 OTA_PingRS.xsd"
Version="8.000">
<Success/>
<OTA_PingRS
xmlns="http://www.opentravel.org/OTA/2003/05" Version="7.000">
<Success/>
<Warnings>
<Warning Type="11" Status="ALPINEBITS_HANDSHAKE">{
"versions": [
{
"version": "2024-10",
"actions": [
{
"action": "action_OTA_Read"
},
{
"action": "action_OTA_HotelResNotif_GuestRequests"
}
]
},
{
"version": "2022-10",
"actions": [
{
"action": "action_OTA_Ping"
},
{
"action": "action_OTA_Read"
},
{
"action": "action_OTA_HotelResNotif_GuestRequests"
}
]
}
]
}</Warning>
<Warning Type="11" Status="ALPINEBITS_HANDSHAKE">{"versions": [{"version": "2024-10", "actions": [{"action": "action_OTA_Read"}, {"action": "action_OTA_HotelResNotif_GuestRequests"}, {"action": "action_OTA_HotelInvCountNotif", "supports": ["OTA_HotelInvCountNotif_accept_rooms", "OTA_HotelInvCountNotif_accept_categories", "OTA_HotelInvCountNotif_accept_deltas", "OTA_HotelInvCountNotif_accept_complete_set", "OTA_HotelInvCountNotif_accept_out_of_order", "OTA_HotelInvCountNotif_accept_out_of_market", "OTA_HotelInvCountNotif_accept_closing_seasons"]}]}, {"version": "2022-10", "actions": [{"action": "action_OTA_Ping"}, {"action": "action_OTA_Read"}, {"action": "action_OTA_HotelResNotif_GuestRequests"}, {"action": "action_OTA_HotelInvCountNotif", "supports": ["OTA_HotelInvCountNotif_accept_rooms", "OTA_HotelInvCountNotif_accept_categories", "OTA_HotelInvCountNotif_accept_deltas", "OTA_HotelInvCountNotif_accept_complete_set", "OTA_HotelInvCountNotif_accept_out_of_order", "OTA_HotelInvCountNotif_accept_out_of_market", "OTA_HotelInvCountNotif_accept_closing_seasons"]}]}]}</Warning>
</Warnings>
<EchoData>
{

View File

@@ -3,6 +3,7 @@
from __future__ import annotations
from datetime import UTC, datetime
from pathlib import Path
import pytest
import pytest_asyncio
@@ -12,6 +13,7 @@ from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_asyn
from alpine_bits_python.alpinebits_server import AlpineBitsClientInfo, Version
from alpine_bits_python.const import HttpStatusCode
from alpine_bits_python.db import Base, Hotel, HotelInventory, RoomAvailability
from alpine_bits_python.hotel_service import hash_password
from alpine_bits_python.free_rooms_action import FreeRoomsAction
@@ -78,7 +80,7 @@ async def insert_test_hotel(session: AsyncSession, hotel_id: str = "TESTHOTEL"):
hotel_id=hotel_id,
hotel_name="Unit Test Hotel",
username="testuser",
password_hash="bcrypt-hash",
password_hash=hash_password("testpass"),
created_at=datetime.now(UTC),
updated_at=datetime.now(UTC),
is_active=True,
@@ -123,7 +125,7 @@ async def test_complete_set_creates_inventory_and_availability(db_session: Async
)
).scalars().all()
assert len(rows) == 3
assert rows[0].count_type_2 == 4
assert rows[0].bookable_type_2 == 4
assert rows[0].update_type == "CompleteSet"
@@ -156,7 +158,7 @@ async def test_complete_set_replaces_previous_availability(db_session: AsyncSess
).scalars().all()
assert len(rows) == 1
assert rows[0].date.isoformat() == "2025-02-01"
assert rows[0].count_type_2 == 1
assert rows[0].bookable_type_2 == 1
@pytest.mark.asyncio
@@ -185,7 +187,7 @@ async def test_delta_updates_only_specified_dates(db_session: AsyncSession):
rows = (
await db_session.execute(select(RoomAvailability).order_by(RoomAvailability.date))
).scalars().all()
counts = {row.date.isoformat(): row.count_type_2 for row in rows}
counts = {row.date.isoformat(): row.bookable_type_2 for row in rows}
assert counts == {
"2025-03-01": 2,
"2025-03-02": 7,
@@ -228,7 +230,37 @@ async def test_closing_season_entries_marked_correctly(db_session: AsyncSession)
).scalars().all()
closing_rows = [row for row in rows if row.is_closing_season]
assert len(closing_rows) == 2
assert all(row.count_type_2 is None for row in closing_rows)
assert all(row.bookable_type_2 is None for row in closing_rows)
@pytest.mark.asyncio
async def test_closing_seasons_test_file(db_session: AsyncSession):
    """Process the ClosingSeasons.xml reference file end to end.

    Submits the sample request through the FreeRooms action and verifies:
    the response is OK, a synthetic ``__CLOSE`` inventory row (with a NULL
    ``inv_code``) was created, and exactly the four closing-season days
    (2025-12-20 through 2025-12-23) were stored with no bookable count.
    """
    await insert_test_hotel(db_session)
    action = make_action()
    # NOTE: removed a dead, side-effect-free duplicate of the path expression
    # below (`Path(__file__).parent / "test_data" / "ClosingSeasons.xml"` as a
    # bare statement) — it built a Path and discarded it.
    xml = (Path(__file__).parent / "test_data" / "ClosingSeasons.xml").read_text()
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.OK, f"Response was not OK {response.xml_content}"
    inventories = (await db_session.execute(select(HotelInventory))).scalars().all()
    closing_inventory = next(inv for inv in inventories if inv.inv_type_code == "__CLOSE")
    assert closing_inventory.inv_code is None
    rows = (
        await db_session.execute(select(RoomAvailability).order_by(RoomAvailability.date))
    ).scalars().all()
    closing_rows = [row for row in rows if row.is_closing_season]
    # Closing season from 2025-12-20 to 2025-12-23 = 4 days
    assert len(closing_rows) == 4
    assert all(row.bookable_type_2 is None for row in closing_rows)
@pytest.mark.asyncio
@@ -365,3 +397,696 @@ async def test_invalid_xml_returns_error(db_session: AsyncSession):
)
assert response.status_code == HttpStatusCode.BAD_REQUEST
assert "Invalid XML payload" in response.xml_content
@pytest.mark.asyncio
async def test_mixing_categories_and_rooms_is_rejected(db_session: AsyncSession):
    """A request mixing category-level and room-level Inventory elements must be rejected with 400."""
    await insert_test_hotel(db_session)
    action = make_action()
    # First inventory is a category (no InvCode), second is an individual room (with InvCode)
    xml = build_complete_set_xml(
        """
<Inventory>
<StatusApplicationControl Start="2025-08-01" End="2025-08-10" InvTypeCode="DOUBLE" />
<InvCounts>
<InvCount CountType="2" Count="3" />
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start="2025-08-21" End="2025-08-30" InvTypeCode="DOUBLE" InvCode="108" />
<InvCounts>
<InvCount CountType="2" Count="1" />
</InvCounts>
</Inventory>
"""
    )
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.BAD_REQUEST
    assert "Mixing room categories and individual rooms" in response.xml_content
@pytest.mark.asyncio
async def test_mixing_rooms_and_categories_is_rejected(db_session: AsyncSession):
    """Room-level followed by category-level Inventory elements is rejected with 400 (order-independent check)."""
    await insert_test_hotel(db_session)
    freerooms_action = make_action()
    # Room report (InvCode present) comes first here, then a category report.
    payload = build_complete_set_xml(
        """
<Inventory>
<StatusApplicationControl Start="2025-08-21" End="2025-08-30" InvTypeCode="DOUBLE" InvCode="108" />
<InvCounts>
<InvCount CountType="2" Count="1" />
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start="2025-08-01" End="2025-08-10" InvTypeCode="DOUBLE" />
<InvCounts>
<InvCount CountType="2" Count="3" />
</InvCounts>
</Inventory>
"""
    )
    result = await freerooms_action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        payload,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert result.status_code == HttpStatusCode.BAD_REQUEST
    assert "Mixing room categories and individual rooms" in result.xml_content
@pytest.mark.asyncio
async def test_multiple_categories_are_allowed(db_session: AsyncSession):
    """Several category-level reports (all without InvCode) are accepted."""
    await insert_test_hotel(db_session)
    action = make_action()
    # Multiple category reports (all without InvCode) should be allowed
    xml = build_complete_set_xml(
        """
<Inventory>
<StatusApplicationControl Start="2025-08-01" End="2025-08-10" InvTypeCode="DOUBLE" />
<InvCounts>
<InvCount CountType="2" Count="3" />
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start="2025-08-11" End="2025-08-20" InvTypeCode="SINGLE" />
<InvCounts>
<InvCount CountType="2" Count="2" />
</InvCounts>
</Inventory>
"""
    )
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.OK
@pytest.mark.asyncio
async def test_multiple_rooms_are_allowed(db_session: AsyncSession):
    """Several room-level reports (all with InvCode) are accepted."""
    await insert_test_hotel(db_session)
    action = make_action()
    # Multiple individual room reports (all with InvCode) should be allowed
    xml = build_complete_set_xml(
        """
<Inventory>
<StatusApplicationControl Start="2025-08-01" End="2025-08-10" InvTypeCode="DOUBLE" InvCode="101" />
<InvCounts>
<InvCount CountType="2" Count="1" />
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start="2025-08-11" End="2025-08-20" InvTypeCode="DOUBLE" InvCode="102" />
<InvCounts>
<InvCount CountType="2" Count="1" />
</InvCounts>
</Inventory>
"""
    )
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.OK
@pytest.mark.asyncio
async def test_closing_season_with_categories_is_allowed(db_session: AsyncSession):
    """A closing-season entry (AllInvCode) followed by category reports is accepted."""
    await insert_test_hotel(db_session)
    action = make_action()
    # Closing season followed by category reports should be allowed
    xml = build_complete_set_xml(
        """
<Inventory>
<StatusApplicationControl Start="2025-04-01" End="2025-04-02" AllInvCode="true"/>
</Inventory>
<Inventory>
<StatusApplicationControl Start="2025-04-03" End="2025-04-10" InvTypeCode="DOUBLE" />
<InvCounts>
<InvCount CountType="2" Count="3" />
</InvCounts>
</Inventory>
"""
    )
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.OK
@pytest.mark.asyncio
async def test_closing_season_with_rooms_is_allowed(db_session: AsyncSession):
    """A closing-season entry (AllInvCode) followed by room-level reports is accepted."""
    await insert_test_hotel(db_session)
    action = make_action()
    # Closing season followed by individual room reports should be allowed
    xml = build_complete_set_xml(
        """
<Inventory>
<StatusApplicationControl Start="2025-04-01" End="2025-04-02" AllInvCode="true"/>
</Inventory>
<Inventory>
<StatusApplicationControl Start="2025-04-03" End="2025-04-10" InvTypeCode="DOUBLE" InvCode="101" />
<InvCounts>
<InvCount CountType="2" Count="1" />
</InvCounts>
</Inventory>
"""
    )
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.OK
@pytest.mark.asyncio
async def test_complete_set_with_single_empty_inventory_resets_all_availability(
    db_session: AsyncSession,
):
    """Test the special case: CompleteSet with one empty Inventory element to reset all availability.

    According to AlpineBits spec, to completely reset all room availability information for a hotel,
    a client can send a CompleteSet request with just one empty Inventory element without any
    attributes. This is the only exception to the rule that StatusApplicationControl is required.
    """
    await insert_test_hotel(db_session)
    action = make_action()
    # First, add some availability data
    initial_xml = build_complete_set_xml(
        daily_inventory("2025-01-01", "2025-01-05", inv_type="DBL", count=10)
    )
    await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        initial_xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    # Verify data was created (5 days of availability, one FreeRooms inventory row)
    rows_before = (await db_session.execute(select(RoomAvailability))).scalars().all()
    assert len(rows_before) == 5
    inventory_before = (await db_session.execute(select(HotelInventory))).scalars().all()
    assert len(inventory_before) == 1
    assert inventory_before[0].source == "FreeRooms"
    # Now send the special reset request with empty Inventory element
    reset_xml = build_complete_set_xml("<Inventory/>")
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        reset_xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    # Should succeed
    assert response.status_code == HttpStatusCode.OK
    # All availability and FreeRooms-sourced inventory should be cleared
    rows_after = (await db_session.execute(select(RoomAvailability))).scalars().all()
    assert len(rows_after) == 0
    inventory_after = (await db_session.execute(select(HotelInventory))).scalars().all()
    assert len(inventory_after) == 0
@pytest.mark.asyncio
async def test_delta_with_empty_inventory_is_rejected(db_session: AsyncSession):
    """The empty-Inventory reset shortcut applies only to CompleteSet; a Delta using it must fail."""
    await insert_test_hotel(db_session)
    free_rooms = make_action()
    delta_payload = build_delta_xml("<Inventory/>")
    outcome = await free_rooms.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        delta_payload,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    # Every Inventory in a Delta request must carry StatusApplicationControl.
    assert outcome.status_code == HttpStatusCode.BAD_REQUEST
    assert "StatusApplicationControl element is required" in outcome.xml_content
@pytest.mark.asyncio
async def test_complete_set_with_multiple_empty_inventories_is_rejected(
    db_session: AsyncSession,
):
    """The reset exception covers exactly one empty Inventory element; two or more are invalid."""
    await insert_test_hotel(db_session)
    handler = make_action()
    # Two attribute-less Inventory elements do not qualify for the reset shortcut.
    request_xml = build_complete_set_xml("<Inventory/><Inventory/>")
    reply = await handler.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        request_xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert reply.status_code == HttpStatusCode.BAD_REQUEST
    assert "StatusApplicationControl element is required" in reply.xml_content
@pytest.mark.asyncio
async def test_complete_set_preserves_inventory_from_other_sources(db_session: AsyncSession):
    """Test that CompleteSet only deletes FreeRooms-sourced inventory, not inventory from other sources."""
    await insert_test_hotel(db_session)
    action = make_action()
    # First, add some FreeRooms inventory
    freerooms_xml = build_complete_set_xml(
        daily_inventory("2025-01-01", "2025-01-05", inv_type="DBL", count=10)
    )
    await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        freerooms_xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    # Manually add inventory from another source (simulating HotelInventory endpoint)
    other_inventory = HotelInventory(
        hotel_id="TESTHOTEL",
        inv_type_code="SGL",
        inv_code=None,
        source="HotelInventory",
        first_seen=datetime.now(UTC),
        last_updated=datetime.now(UTC),
    )
    db_session.add(other_inventory)
    await db_session.commit()
    # Verify both inventory items exist (ordered by source: FreeRooms < HotelInventory)
    inventory_before = (
        await db_session.execute(select(HotelInventory).order_by(HotelInventory.source))
    ).scalars().all()
    assert len(inventory_before) == 2
    assert inventory_before[0].source == "FreeRooms"
    assert inventory_before[1].source == "HotelInventory"
    # Send a new CompleteSet with different data
    new_xml = build_complete_set_xml(
        daily_inventory("2025-01-01", "2025-01-03", inv_type="TRIPLE", count=5)
    )
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        new_xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.OK
    # Check inventory: FreeRooms inventory should be replaced, but HotelInventory source should remain
    inventory_after = (
        await db_session.execute(select(HotelInventory).order_by(HotelInventory.source))
    ).scalars().all()
    assert len(inventory_after) == 2
    # New FreeRooms inventory
    assert inventory_after[0].source == "FreeRooms"
    assert inventory_after[0].inv_type_code == "TRIPLE"
    # Preserved HotelInventory source
    assert inventory_after[1].source == "HotelInventory"
    assert inventory_after[1].inv_type_code == "SGL"
@pytest.mark.asyncio
async def test_closing_season_overlapping_with_inventory_is_rejected(db_session: AsyncSession):
    """Test that closing seasons cannot overlap with regular inventory entries."""
    await insert_test_hotel(db_session)
    action = make_action()
    # Closing season from July 31 to Sept 30, with inventory from Aug 1-10 (overlaps!)
    xml = build_complete_set_xml(
        """
<Inventory>
<StatusApplicationControl Start="2022-07-31" End="2022-09-30" AllInvCode="true" />
</Inventory>
<Inventory>
<StatusApplicationControl Start="2022-08-01" End="2022-08-10" InvTypeCode="DOUBLE" />
<InvCounts>
<InvCount CountType="2" Count="3" />
</InvCounts>
</Inventory>
"""
    )
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.BAD_REQUEST
    assert "overlaps with closing season" in response.xml_content
@pytest.mark.asyncio
async def test_overlapping_closing_seasons_are_rejected(db_session: AsyncSession):
    """Test that multiple closing seasons cannot overlap with each other."""
    await insert_test_hotel(db_session)
    action = make_action()
    # Two overlapping closing seasons (July 15 - 31 covered by both)
    xml = build_complete_set_xml(
        """
<Inventory>
<StatusApplicationControl Start="2022-07-01" End="2022-07-31" AllInvCode="true" />
</Inventory>
<Inventory>
<StatusApplicationControl Start="2022-07-15" End="2022-08-15" AllInvCode="true" />
</Inventory>
"""
    )
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.BAD_REQUEST
    assert "Closing seasons overlap" in response.xml_content
@pytest.mark.asyncio
async def test_non_overlapping_closing_seasons_are_allowed(db_session: AsyncSession):
    """Test that multiple non-overlapping closing seasons are allowed."""
    await insert_test_hotel(db_session)
    action = make_action()
    # Two non-overlapping closing seasons, plus inventory in the gap between them
    xml = build_complete_set_xml(
        """
<Inventory>
<StatusApplicationControl Start="2022-07-01" End="2022-07-15" AllInvCode="true" />
</Inventory>
<Inventory>
<StatusApplicationControl Start="2022-08-01" End="2022-08-15" AllInvCode="true" />
</Inventory>
<Inventory>
<StatusApplicationControl Start="2022-07-16" End="2022-07-31" InvTypeCode="DOUBLE" />
<InvCounts>
<InvCount CountType="2" Count="5" />
</InvCounts>
</Inventory>
"""
    )
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.OK
    # Verify closing seasons were created
    rows = (
        await db_session.execute(
            select(RoomAvailability).where(RoomAvailability.is_closing_season.is_(True))
        )
    ).scalars().all()
    # 15 days in July + 15 days in August = 30 closing season days
    assert len(rows) == 30
@pytest.mark.asyncio
async def test_adjacent_closing_season_and_inventory_are_allowed(db_session: AsyncSession):
    """Test that closing seasons and inventory can be adjacent (not overlapping) without error."""
    await insert_test_hotel(db_session)
    action = make_action()
    # Closing season ends July 31, inventory starts Aug 1 (adjacent, not overlapping)
    xml = build_complete_set_xml(
        """
<Inventory>
<StatusApplicationControl Start="2022-07-01" End="2022-07-31" AllInvCode="true" />
</Inventory>
<Inventory>
<StatusApplicationControl Start="2022-08-01" End="2022-08-10" InvTypeCode="DOUBLE" />
<InvCounts>
<InvCount CountType="2" Count="3" />
</InvCounts>
</Inventory>
"""
    )
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.OK
@pytest.mark.asyncio
async def test_overlapping_inventory_for_same_category_is_rejected(db_session: AsyncSession):
    """Test that overlapping date ranges for the same room category are rejected."""
    await insert_test_hotel(db_session)
    action = make_action()
    # Two overlapping date ranges for DOUBLE category (Aug 11-20 overlaps Aug 15-30)
    xml = build_complete_set_xml(
        """
<Inventory>
<StatusApplicationControl Start="2022-08-01" End="2022-08-10" InvTypeCode="DOUBLE" />
<InvCounts>
<InvCount CountType="2" Count="3" />
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start="2022-08-11" End="2022-08-20" InvTypeCode="DOUBLE" />
<InvCounts>
<InvCount CountType="2" Count="5" />
</InvCounts>
</Inventory>
<Inventory>
<StatusApplicationControl Start="2022-08-15" End="2022-08-30" InvTypeCode="DOUBLE" />
<InvCounts>
<InvCount CountType="2" Count="3" />
</InvCounts>
</Inventory>
"""
    )
    response = await action.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        xml,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )
    assert response.status_code == HttpStatusCode.BAD_REQUEST
    assert "Overlapping date ranges for category 'DOUBLE'" in response.xml_content
@pytest.mark.asyncio
async def test_overlapping_inventory_for_same_room_is_rejected(db_session: AsyncSession):
    """Overlapping date ranges for one specific room must be rejected.

    Room 101 is sent twice with ranges Aug 1–15 and Aug 10–20, which share
    Aug 10–15; the handler must answer BAD_REQUEST and name the room.
    """
    await insert_test_hotel(db_session)
    handler = make_action()

    payload = build_complete_set_xml(
        """
        <Inventory>
        <StatusApplicationControl Start="2022-08-01" End="2022-08-15" InvTypeCode="DOUBLE" InvCode="101" />
        <InvCounts>
        <InvCount CountType="2" Count="1" />
        </InvCounts>
        </Inventory>
        <Inventory>
        <StatusApplicationControl Start="2022-08-10" End="2022-08-20" InvTypeCode="DOUBLE" InvCode="101" />
        <InvCounts>
        <InvCount CountType="2" Count="1" />
        </InvCounts>
        </Inventory>
        """
    )

    result = await handler.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        payload,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )

    assert result.status_code == HttpStatusCode.BAD_REQUEST
    # The error body must identify which room conflicted.
    assert "Overlapping date ranges for room '101'" in result.xml_content
@pytest.mark.asyncio
async def test_non_overlapping_inventory_for_same_category_is_allowed(db_session: AsyncSession):
    """Back-to-back, non-overlapping ranges for one category must be accepted.

    Three consecutive DOUBLE ranges cover Aug 1–10, 11–20 and 21–30 with no
    shared dates. The handler must answer OK and persist one availability
    row per day, i.e. 30 rows in total.
    """
    await insert_test_hotel(db_session)
    handler = make_action()

    payload = build_complete_set_xml(
        """
        <Inventory>
        <StatusApplicationControl Start="2022-08-01" End="2022-08-10" InvTypeCode="DOUBLE" />
        <InvCounts>
        <InvCount CountType="2" Count="3" />
        </InvCounts>
        </Inventory>
        <Inventory>
        <StatusApplicationControl Start="2022-08-11" End="2022-08-20" InvTypeCode="DOUBLE" />
        <InvCounts>
        <InvCount CountType="2" Count="5" />
        </InvCounts>
        </Inventory>
        <Inventory>
        <StatusApplicationControl Start="2022-08-21" End="2022-08-30" InvTypeCode="DOUBLE" />
        <InvCounts>
        <InvCount CountType="2" Count="3" />
        </InvCounts>
        </Inventory>
        """
    )

    result = await handler.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        payload,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )

    assert result.status_code == HttpStatusCode.OK

    # Every single day of Aug 1-30 should now have an availability record.
    query_result = await db_session.execute(
        select(RoomAvailability).order_by(RoomAvailability.date)
    )
    stored = query_result.scalars().all()
    assert len(stored) == 30  # Aug 1-30
@pytest.mark.asyncio
async def test_overlapping_inventory_for_different_categories_is_allowed(db_session: AsyncSession):
    """Date overlap across *different* categories must not be rejected.

    DOUBLE covers Aug 1–15 and SINGLE covers Aug 10–20; the shared window
    Aug 10–15 is fine because overlap checks apply per category.
    """
    await insert_test_hotel(db_session)
    handler = make_action()

    payload = build_complete_set_xml(
        """
        <Inventory>
        <StatusApplicationControl Start="2022-08-01" End="2022-08-15" InvTypeCode="DOUBLE" />
        <InvCounts>
        <InvCount CountType="2" Count="3" />
        </InvCounts>
        </Inventory>
        <Inventory>
        <StatusApplicationControl Start="2022-08-10" End="2022-08-20" InvTypeCode="SINGLE" />
        <InvCounts>
        <InvCount CountType="2" Count="2" />
        </InvCounts>
        </Inventory>
        """
    )

    result = await handler.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        payload,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )

    assert result.status_code == HttpStatusCode.OK
@pytest.mark.asyncio
async def test_overlapping_inventory_for_different_rooms_is_allowed(db_session: AsyncSession):
    """Date overlap across *different* individual rooms must not be rejected.

    Room 101 covers Aug 1–15 and room 102 covers Aug 10–20; the shared
    window Aug 10–15 is fine because overlap checks apply per room.
    """
    await insert_test_hotel(db_session)
    handler = make_action()

    payload = build_complete_set_xml(
        """
        <Inventory>
        <StatusApplicationControl Start="2022-08-01" End="2022-08-15" InvTypeCode="DOUBLE" InvCode="101" />
        <InvCounts>
        <InvCount CountType="2" Count="1" />
        </InvCounts>
        </Inventory>
        <Inventory>
        <StatusApplicationControl Start="2022-08-10" End="2022-08-20" InvTypeCode="DOUBLE" InvCode="102" />
        <InvCounts>
        <InvCount CountType="2" Count="1" />
        </InvCounts>
        </Inventory>
        """
    )

    result = await handler.handle(
        "OTA_HotelInvCountNotif:FreeRooms",
        payload,
        Version.V2024_10,
        make_client_info(),
        db_session,
    )

    assert result.status_code == HttpStatusCode.OK

View File

@@ -23,6 +23,7 @@ from alpine_bits_python.api import app
from alpine_bits_python.const import WebhookStatus
from alpine_bits_python.db import Base, Reservation, WebhookRequest
from alpine_bits_python.db_setup import reprocess_stuck_webhooks
from alpine_bits_python.hotel_service import hash_password
from alpine_bits_python.schemas import WebhookRequestData
from alpine_bits_python.webhook_processor import initialize_webhook_processors, webhook_registry
@@ -206,7 +207,7 @@ class TestWebhookReprocessing:
hotel_id="HOTEL123",
hotel_name="Test Hotel",
username="testuser",
password_hash="dummy",
password_hash=hash_password("testpass"),
created_at=datetime.now(UTC),
updated_at=datetime.now(UTC),
is_active=True,
@@ -291,7 +292,7 @@ class TestWebhookReprocessingNeverBlocksStartup:
hotel_id="HOTEL123",
hotel_name="Test Hotel",
username="testuser",
password_hash="dummy",
password_hash=hash_password("testpass"),
created_at=datetime.now(UTC),
updated_at=datetime.now(UTC),
is_active=True,

View File

@@ -95,7 +95,7 @@ class TestReservationXMLBuilder:
reservation_date="2025-11-14",
)
builder.set_guest(
guest_id="guest_001",
guest_id="1001",
first_name="John",
last_name="Doe",
email="john@example.com",
@@ -138,7 +138,7 @@ class TestReservationXMLBuilder:
reservation_date="2025-11-14",
)
builder.set_guest(
guest_id="guest_001",
guest_id="1001",
first_name="John",
last_name="Doe",
email="john@example.com",
@@ -179,7 +179,7 @@ class TestReservationXMLBuilder:
advertising_campagne="EAIaIQobChMI...",
)
builder.set_guest(
guest_id="guest_001",
guest_id="1001",
first_name="John",
last_name="Doe",
email="john@example.com",
@@ -213,7 +213,7 @@ class TestMultiReservationXMLBuilder:
reservation_date="2025-11-14",
)
res1.set_guest(
guest_id="guest_001",
guest_id="1001",
first_name="John",
last_name="Doe",
email="john@example.com",
@@ -233,7 +233,7 @@ class TestMultiReservationXMLBuilder:
reservation_date="2025-11-15",
)
res2.set_guest(
guest_id="guest_002",
guest_id="1002",
first_name="Jane",
last_name="Smith",
email="jane@example.com",
@@ -268,7 +268,7 @@ class TestConvenienceFeatures:
reservation_date="2025-11-14",
)
.set_guest(
guest_id="guest_001",
guest_id="1001",
first_name="John",
last_name="Doe",
email="john@example.com",
@@ -294,7 +294,7 @@ class TestConvenienceFeatures:
reservation_date="2025-11-14",
)
builder.set_guest(
guest_id="guest_001",
guest_id="1001",
first_name="John",
last_name="Doe",
email="john@example.com",

11
uv.lock generated
View File

@@ -42,6 +42,7 @@ dependencies = [
{ name = "fast-langdetect" },
{ name = "fastapi" },
{ name = "generateds" },
{ name = "git-filter-repo" },
{ name = "httpx" },
{ name = "lxml" },
{ name = "pandas" },
@@ -75,6 +76,7 @@ requires-dist = [
{ name = "fast-langdetect", specifier = ">=1.0.0" },
{ name = "fastapi", specifier = ">=0.117.1" },
{ name = "generateds", specifier = ">=2.44.3" },
{ name = "git-filter-repo", specifier = ">=2.47.0" },
{ name = "httpx", specifier = ">=0.28.1" },
{ name = "lxml", specifier = ">=6.0.1" },
{ name = "pandas", specifier = ">=2.3.3" },
@@ -516,6 +518,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b2/84/79ca1e01337fe898cd303ac8d51151b4bea4891028b93ae5bf5e9cc911a9/generateDS-2.44.3-py3-none-any.whl", hash = "sha256:ae5db7105ca777182ba6549118c9aba1690ea341400af13ffbdbfbe1bc022299", size = 147394, upload-time = "2024-10-08T21:54:34.506Z" },
]
[[package]]
name = "git-filter-repo"
version = "2.47.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b9/bb/7a283f568af6b0528ade65e8ace84bd6ba46003e429101bcd62c232d01a5/git_filter_repo-2.47.0.tar.gz", hash = "sha256:411b27e68a080c07a69c233cb526dbc2d848b09a72f10477f4444dd0822cf290", size = 275743, upload-time = "2024-12-04T03:10:48.2Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/60/60/d3943f0880ebcb7e0bdf79254d10dddd39c7b656eeecae32b8806ff66dec/git_filter_repo-2.47.0-py3-none-any.whl", hash = "sha256:2cd04929b9024e83e65db571cbe36aec65ead0cb5f9ec5abe42158654af5ad83", size = 76282, upload-time = "2024-12-04T03:10:46.064Z" },
]
[[package]]
name = "greenlet"
version = "3.2.4"