diff --git a/Backend/.env.example b/Backend/.env.example
index 688eb386..086a03ef 100644
--- a/Backend/.env.example
+++ b/Backend/.env.example
@@ -1,35 +1,34 @@
-# Environment
-NODE_ENV=development
+# Hotel Booking API - Environment Variables
+# Copy this file to .env and fill in your actual values
-# Server
-PORT=3000
-HOST=localhost
+# ============================================
+# Email/SMTP Configuration
+# ============================================
+# SMTP Server Settings
+SMTP_HOST=smtp.gmail.com
+SMTP_PORT=587
+SMTP_USER=your-email@gmail.com
+SMTP_PASSWORD=your-app-specific-password
-# Database
+# Email Sender Information
+SMTP_FROM_EMAIL=noreply@yourdomain.com
+SMTP_FROM_NAME=Hotel Booking
+
+# Alternative: Legacy environment variable names (for backward compatibility)
+# MAIL_HOST=smtp.gmail.com
+# MAIL_PORT=587
+# MAIL_USER=your-email@gmail.com
+# MAIL_PASS=your-app-specific-password
+# MAIL_FROM=noreply@yourdomain.com
+# MAIL_SECURE=false
+
+# ============================================
+# Other Required Variables
+# ============================================
+CLIENT_URL=http://localhost:5173
+DB_USER=root
+DB_PASS=your_database_password
+DB_NAME=hotel_db
 DB_HOST=localhost
 DB_PORT=3306
-DB_USER=root
-DB_PASS=
-DB_NAME=hotel_booking_dev
-
-# JWT
-JWT_SECRET=your_super_secret_jwt_key_change_this_in_production
-JWT_EXPIRES_IN=1h
-JWT_REFRESH_SECRET=your_super_secret_refresh_key_change_this_in_production
-JWT_REFRESH_EXPIRES_IN=7d
-
-# Client URL
-CLIENT_URL=http://localhost:5173
-
-# Upload
-MAX_FILE_SIZE=5242880
-ALLOWED_FILE_TYPES=image/jpeg,image/png,image/jpg,image/webp
-
-# Pagination
-DEFAULT_PAGE_SIZE=10
-MAX_PAGE_SIZE=100
-
-# Rate Limiting
-RATE_LIMIT_WINDOW_MS=900000
-RATE_LIMIT_MAX_REQUESTS=100
-
+JWT_SECRET=your-super-secret-jwt-key-change-in-production
diff --git a/Backend/alembic/__pycache__/env.cpython-312.pyc b/Backend/alembic/__pycache__/env.cpython-312.pyc
new file mode 100644
index 00000000..d39f425a
Binary files /dev/null and b/Backend/alembic/__pycache__/env.cpython-312.pyc differ
diff --git a/Backend/alembic/env.py b/Backend/alembic/env.py
index 96efcb88..2ffe86db 100644
--- a/Backend/alembic/env.py
+++ b/Backend/alembic/env.py
@@ -5,12 +5,17 @@ from alembic import context
 import os
 import sys
 from pathlib import Path
+from dotenv import load_dotenv
+
+# Load environment variables
+load_dotenv()
 
 # Add parent directory to path
 sys.path.insert(0, str(Path(__file__).resolve().parents[1]))
 
 # Import models and Base
 from src.config.database import Base
+from src.config.settings import settings
 from src.models import *  # Import all models
 
 # this is the Alembic Config object
@@ -20,16 +25,8 @@ config = context.config
 if config.config_file_name is not None:
     fileConfig(config.config_file_name)
 
-# Get database URL from environment
-database_url = os.getenv("DATABASE_URL")
-if not database_url:
-    db_user = os.getenv("DB_USER", "root")
-    db_pass = os.getenv("DB_PASS", "")
-    db_name = os.getenv("DB_NAME", "hotel_db")
-    db_host = os.getenv("DB_HOST", "localhost")
-    db_port = os.getenv("DB_PORT", "3306")
-    database_url = f"mysql+pymysql://{db_user}:{db_pass}@{db_host}:{db_port}/{db_name}"
-
+# Get database URL from settings
+database_url = settings.database_url
 config.set_main_option("sqlalchemy.url", database_url)
 
 # add your model's MetaData object here
diff --git a/Backend/alembic/versions/59baf2338f8a_initial_migration_create_all_tables_.py b/Backend/alembic/versions/59baf2338f8a_initial_migration_create_all_tables_.py
new file mode 100644
index 00000000..fcb3e0d3
--- /dev/null +++ b/Backend/alembic/versions/59baf2338f8a_initial_migration_create_all_tables_.py @@ -0,0 +1,285 @@ +"""Initial migration: create all tables with indexes + +Revision ID: 59baf2338f8a +Revises: +Create Date: 2025-11-16 16:03:26.313117 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +# revision identifiers, used by Alembic. +revision = '59baf2338f8a' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('audit_logs', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('action', sa.String(length=100), nullable=False), + sa.Column('resource_type', sa.String(length=50), nullable=False), + sa.Column('resource_id', sa.Integer(), nullable=True), + sa.Column('ip_address', sa.String(length=45), nullable=True), + sa.Column('user_agent', sa.String(length=255), nullable=True), + sa.Column('request_id', sa.String(length=36), nullable=True), + sa.Column('details', sa.JSON(), nullable=True), + sa.Column('status', sa.String(length=20), nullable=False), + sa.Column('error_message', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_audit_logs_action'), 'audit_logs', ['action'], unique=False) + op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False) + op.create_index(op.f('ix_audit_logs_id'), 'audit_logs', ['id'], unique=False) + op.create_index(op.f('ix_audit_logs_request_id'), 'audit_logs', ['request_id'], unique=False) + op.create_index(op.f('ix_audit_logs_resource_id'), 'audit_logs', ['resource_id'], unique=False) + op.create_index(op.f('ix_audit_logs_resource_type'), 'audit_logs', ['resource_type'], unique=False) + op.create_index(op.f('ix_audit_logs_user_id'), 'audit_logs', ['user_id'], unique=False) + op.drop_index('name', table_name='SequelizeMeta') + op.drop_table('SequelizeMeta') + op.drop_index('banners_is_active', table_name='banners') + op.drop_index('banners_position', table_name='banners') + op.create_index(op.f('ix_banners_id'), 'banners', ['id'], unique=False) + # Drop foreign keys first, then indexes + op.drop_constraint('bookings_ibfk_2', 'bookings', type_='foreignkey') + op.drop_constraint('bookings_ibfk_1', 'bookings', type_='foreignkey') + op.drop_index('booking_number', table_name='bookings') + op.drop_index('bookings_booking_number', table_name='bookings') + op.drop_index('bookings_check_in_date', table_name='bookings') + op.drop_index('bookings_check_out_date', table_name='bookings') + op.drop_index('bookings_room_id', table_name='bookings') + op.drop_index('bookings_status', table_name='bookings') + op.drop_index('bookings_user_id', table_name='bookings') + op.create_index(op.f('ix_bookings_booking_number'), 'bookings', ['booking_number'], unique=True) + op.create_index(op.f('ix_bookings_id'), 'bookings', ['id'], unique=False) + op.create_foreign_key(None, 'bookings', 'users', ['user_id'], ['id']) + op.create_foreign_key(None, 'bookings', 'rooms', ['room_id'], ['id']) + # Drop foreign keys first, then indexes + op.drop_constraint('checkin_checkout_ibfk_1', 'checkin_checkout', type_='foreignkey') + op.drop_constraint('checkin_checkout_ibfk_2', 'checkin_checkout', type_='foreignkey') + op.drop_constraint('checkin_checkout_ibfk_3', 
'checkin_checkout', type_='foreignkey') + op.drop_index('checkin_checkout_booking_id', table_name='checkin_checkout') + op.create_index(op.f('ix_checkin_checkout_id'), 'checkin_checkout', ['id'], unique=False) + op.create_unique_constraint(None, 'checkin_checkout', ['booking_id']) + op.create_foreign_key(None, 'checkin_checkout', 'bookings', ['booking_id'], ['id']) + op.create_foreign_key(None, 'checkin_checkout', 'users', ['checkout_by'], ['id']) + op.create_foreign_key(None, 'checkin_checkout', 'users', ['checkin_by'], ['id']) + # Drop foreign keys first, then indexes + op.drop_constraint('favorites_ibfk_2', 'favorites', type_='foreignkey') + op.drop_constraint('favorites_ibfk_1', 'favorites', type_='foreignkey') + op.drop_index('favorites_room_id', table_name='favorites') + op.drop_index('favorites_user_id', table_name='favorites') + op.drop_index('unique_user_room_favorite', table_name='favorites') + op.create_index(op.f('ix_favorites_id'), 'favorites', ['id'], unique=False) + op.create_foreign_key(None, 'favorites', 'users', ['user_id'], ['id']) + op.create_foreign_key(None, 'favorites', 'rooms', ['room_id'], ['id']) + op.alter_column('password_reset_tokens', 'used', + existing_type=mysql.TINYINT(display_width=1), + nullable=False, + existing_server_default=sa.text("'0'")) + # Drop foreign key first, then indexes + op.drop_constraint('password_reset_tokens_ibfk_1', 'password_reset_tokens', type_='foreignkey') + op.drop_index('password_reset_tokens_token', table_name='password_reset_tokens') + op.drop_index('password_reset_tokens_user_id', table_name='password_reset_tokens') + op.drop_index('token', table_name='password_reset_tokens') + op.create_index(op.f('ix_password_reset_tokens_id'), 'password_reset_tokens', ['id'], unique=False) + op.create_index(op.f('ix_password_reset_tokens_token'), 'password_reset_tokens', ['token'], unique=True) + op.create_foreign_key(None, 'password_reset_tokens', 'users', ['user_id'], ['id']) + op.alter_column('payments', 'deposit_percentage', + existing_type=mysql.INTEGER(), + comment=None, + existing_comment='Percentage of deposit (e.g., 20, 30, 50)', + existing_nullable=True) + # Drop foreign keys first, then indexes + op.drop_constraint('payments_related_payment_id_foreign_idx', 'payments', type_='foreignkey') + op.drop_constraint('payments_ibfk_1', 'payments', type_='foreignkey') + op.drop_index('payments_booking_id', table_name='payments') + op.drop_index('payments_payment_status', table_name='payments') + op.create_index(op.f('ix_payments_id'), 'payments', ['id'], unique=False) + op.create_foreign_key(None, 'payments', 'bookings', ['booking_id'], ['id']) + op.create_foreign_key(None, 'payments', 'payments', ['related_payment_id'], ['id']) + op.drop_index('code', table_name='promotions') + op.drop_index('promotions_code', table_name='promotions') + op.drop_index('promotions_is_active', table_name='promotions') + op.create_index(op.f('ix_promotions_code'), 'promotions', ['code'], unique=True) + op.create_index(op.f('ix_promotions_id'), 'promotions', ['id'], unique=False) + # Drop foreign key first, then indexes + op.drop_constraint('refresh_tokens_ibfk_1', 'refresh_tokens', type_='foreignkey') + op.drop_index('refresh_tokens_token', table_name='refresh_tokens') + op.drop_index('refresh_tokens_user_id', table_name='refresh_tokens') + op.drop_index('token', table_name='refresh_tokens') + op.create_index(op.f('ix_refresh_tokens_id'), 'refresh_tokens', ['id'], unique=False) + op.create_index(op.f('ix_refresh_tokens_token'), 'refresh_tokens', ['token'], 
unique=True) + op.create_foreign_key(None, 'refresh_tokens', 'users', ['user_id'], ['id']) + # Drop foreign keys first, then indexes + op.drop_constraint('reviews_ibfk_2', 'reviews', type_='foreignkey') + op.drop_constraint('reviews_ibfk_1', 'reviews', type_='foreignkey') + op.drop_index('reviews_room_id', table_name='reviews') + op.drop_index('reviews_status', table_name='reviews') + op.drop_index('reviews_user_id', table_name='reviews') + op.create_index(op.f('ix_reviews_id'), 'reviews', ['id'], unique=False) + op.create_foreign_key(None, 'reviews', 'rooms', ['room_id'], ['id']) + op.create_foreign_key(None, 'reviews', 'users', ['user_id'], ['id']) + op.drop_index('name', table_name='roles') + op.create_index(op.f('ix_roles_id'), 'roles', ['id'], unique=False) + op.create_index(op.f('ix_roles_name'), 'roles', ['name'], unique=True) + op.create_index(op.f('ix_room_types_id'), 'room_types', ['id'], unique=False) + # Drop foreign key first, then indexes + op.drop_constraint('rooms_ibfk_1', 'rooms', type_='foreignkey') + op.drop_index('room_number', table_name='rooms') + op.drop_index('rooms_featured', table_name='rooms') + op.drop_index('rooms_room_type_id', table_name='rooms') + op.drop_index('rooms_status', table_name='rooms') + op.create_index(op.f('ix_rooms_id'), 'rooms', ['id'], unique=False) + op.create_index(op.f('ix_rooms_room_number'), 'rooms', ['room_number'], unique=True) + op.create_foreign_key(None, 'rooms', 'room_types', ['room_type_id'], ['id']) + # Drop foreign keys first, then indexes + op.drop_constraint('service_usages_ibfk_1', 'service_usages', type_='foreignkey') + op.drop_constraint('service_usages_ibfk_2', 'service_usages', type_='foreignkey') + op.drop_index('service_usages_booking_id', table_name='service_usages') + op.drop_index('service_usages_service_id', table_name='service_usages') + op.create_index(op.f('ix_service_usages_id'), 'service_usages', ['id'], unique=False) + op.create_foreign_key(None, 'service_usages', 'bookings', ['booking_id'], ['id']) + op.create_foreign_key(None, 'service_usages', 'services', ['service_id'], ['id']) + op.drop_index('services_category', table_name='services') + op.create_index(op.f('ix_services_id'), 'services', ['id'], unique=False) + # Drop foreign key first, then indexes + op.drop_constraint('users_ibfk_1', 'users', type_='foreignkey') + op.drop_index('email', table_name='users') + op.drop_index('users_email', table_name='users') + op.drop_index('users_role_id', table_name='users') + op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True) + op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False) + op.create_foreign_key(None, 'users', 'roles', ['role_id'], ['id']) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_constraint(None, 'users', type_='foreignkey') + op.create_foreign_key('users_ibfk_1', 'users', 'roles', ['role_id'], ['id'], onupdate='CASCADE', ondelete='RESTRICT') + op.drop_index(op.f('ix_users_id'), table_name='users') + op.drop_index(op.f('ix_users_email'), table_name='users') + op.create_index('users_role_id', 'users', ['role_id'], unique=False) + op.create_index('users_email', 'users', ['email'], unique=False) + op.create_index('email', 'users', ['email'], unique=False) + op.drop_index(op.f('ix_services_id'), table_name='services') + op.create_index('services_category', 'services', ['category'], unique=False) + op.drop_constraint(None, 'service_usages', type_='foreignkey') + op.drop_constraint(None, 'service_usages', type_='foreignkey') + op.create_foreign_key('service_usages_ibfk_2', 'service_usages', 'services', ['service_id'], ['id'], onupdate='CASCADE', ondelete='RESTRICT') + op.create_foreign_key('service_usages_ibfk_1', 'service_usages', 'bookings', ['booking_id'], ['id'], onupdate='CASCADE', ondelete='RESTRICT') + op.drop_index(op.f('ix_service_usages_id'), table_name='service_usages') + op.create_index('service_usages_service_id', 'service_usages', ['service_id'], unique=False) + op.create_index('service_usages_booking_id', 'service_usages', ['booking_id'], unique=False) + op.drop_constraint(None, 'rooms', type_='foreignkey') + op.create_foreign_key('rooms_ibfk_1', 'rooms', 'room_types', ['room_type_id'], ['id'], onupdate='CASCADE', ondelete='RESTRICT') + op.drop_index(op.f('ix_rooms_room_number'), table_name='rooms') + op.drop_index(op.f('ix_rooms_id'), table_name='rooms') + op.create_index('rooms_status', 'rooms', ['status'], unique=False) + op.create_index('rooms_room_type_id', 'rooms', ['room_type_id'], unique=False) + op.create_index('rooms_featured', 'rooms', ['featured'], unique=False) + op.create_index('room_number', 'rooms', ['room_number'], unique=False) + op.drop_index(op.f('ix_room_types_id'), table_name='room_types') + op.drop_index(op.f('ix_roles_name'), table_name='roles') + op.drop_index(op.f('ix_roles_id'), table_name='roles') + op.create_index('name', 'roles', ['name'], unique=False) + op.drop_constraint(None, 'reviews', type_='foreignkey') + op.drop_constraint(None, 'reviews', type_='foreignkey') + op.create_foreign_key('reviews_ibfk_1', 'reviews', 'users', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') + op.create_foreign_key('reviews_ibfk_2', 'reviews', 'rooms', ['room_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') + op.drop_index(op.f('ix_reviews_id'), table_name='reviews') + op.create_index('reviews_user_id', 'reviews', ['user_id'], unique=False) + op.create_index('reviews_status', 'reviews', ['status'], unique=False) + op.create_index('reviews_room_id', 'reviews', ['room_id'], unique=False) + op.drop_constraint(None, 'refresh_tokens', type_='foreignkey') + op.create_foreign_key('refresh_tokens_ibfk_1', 'refresh_tokens', 'users', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') + op.drop_index(op.f('ix_refresh_tokens_token'), table_name='refresh_tokens') + op.drop_index(op.f('ix_refresh_tokens_id'), table_name='refresh_tokens') + op.create_index('token', 'refresh_tokens', ['token'], unique=False) + op.create_index('refresh_tokens_user_id', 'refresh_tokens', ['user_id'], unique=False) + op.create_index('refresh_tokens_token', 'refresh_tokens', ['token'], unique=False) + op.drop_index(op.f('ix_promotions_id'), table_name='promotions') + op.drop_index(op.f('ix_promotions_code'), table_name='promotions') + 
op.create_index('promotions_is_active', 'promotions', ['is_active'], unique=False) + op.create_index('promotions_code', 'promotions', ['code'], unique=False) + op.create_index('code', 'promotions', ['code'], unique=False) + op.drop_constraint(None, 'payments', type_='foreignkey') + op.drop_constraint(None, 'payments', type_='foreignkey') + op.create_foreign_key('payments_ibfk_1', 'payments', 'bookings', ['booking_id'], ['id'], onupdate='CASCADE', ondelete='RESTRICT') + op.create_foreign_key('payments_related_payment_id_foreign_idx', 'payments', 'payments', ['related_payment_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL') + op.drop_index(op.f('ix_payments_id'), table_name='payments') + op.create_index('payments_payment_status', 'payments', ['payment_status'], unique=False) + op.create_index('payments_booking_id', 'payments', ['booking_id'], unique=False) + op.alter_column('payments', 'deposit_percentage', + existing_type=mysql.INTEGER(), + comment='Percentage of deposit (e.g., 20, 30, 50)', + existing_nullable=True) + op.drop_constraint(None, 'password_reset_tokens', type_='foreignkey') + op.create_foreign_key('password_reset_tokens_ibfk_1', 'password_reset_tokens', 'users', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') + op.drop_index(op.f('ix_password_reset_tokens_token'), table_name='password_reset_tokens') + op.drop_index(op.f('ix_password_reset_tokens_id'), table_name='password_reset_tokens') + op.create_index('token', 'password_reset_tokens', ['token'], unique=False) + op.create_index('password_reset_tokens_user_id', 'password_reset_tokens', ['user_id'], unique=False) + op.create_index('password_reset_tokens_token', 'password_reset_tokens', ['token'], unique=False) + op.alter_column('password_reset_tokens', 'used', + existing_type=mysql.TINYINT(display_width=1), + nullable=True, + existing_server_default=sa.text("'0'")) + op.drop_constraint(None, 'favorites', type_='foreignkey') + op.drop_constraint(None, 'favorites', type_='foreignkey') + op.create_foreign_key('favorites_ibfk_1', 'favorites', 'users', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') + op.create_foreign_key('favorites_ibfk_2', 'favorites', 'rooms', ['room_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE') + op.drop_index(op.f('ix_favorites_id'), table_name='favorites') + op.create_index('unique_user_room_favorite', 'favorites', ['user_id', 'room_id'], unique=False) + op.create_index('favorites_user_id', 'favorites', ['user_id'], unique=False) + op.create_index('favorites_room_id', 'favorites', ['room_id'], unique=False) + op.drop_constraint(None, 'checkin_checkout', type_='foreignkey') + op.drop_constraint(None, 'checkin_checkout', type_='foreignkey') + op.drop_constraint(None, 'checkin_checkout', type_='foreignkey') + op.create_foreign_key('checkin_checkout_ibfk_3', 'checkin_checkout', 'users', ['checkout_by'], ['id'], onupdate='CASCADE', ondelete='SET NULL') + op.create_foreign_key('checkin_checkout_ibfk_2', 'checkin_checkout', 'users', ['checkin_by'], ['id'], onupdate='CASCADE', ondelete='SET NULL') + op.create_foreign_key('checkin_checkout_ibfk_1', 'checkin_checkout', 'bookings', ['booking_id'], ['id'], onupdate='CASCADE', ondelete='RESTRICT') + op.drop_constraint(None, 'checkin_checkout', type_='unique') + op.drop_index(op.f('ix_checkin_checkout_id'), table_name='checkin_checkout') + op.create_index('checkin_checkout_booking_id', 'checkin_checkout', ['booking_id'], unique=False) + op.drop_constraint(None, 'bookings', type_='foreignkey') + op.drop_constraint(None, 'bookings', 
type_='foreignkey') + op.create_foreign_key('bookings_ibfk_1', 'bookings', 'users', ['user_id'], ['id'], onupdate='CASCADE', ondelete='RESTRICT') + op.create_foreign_key('bookings_ibfk_2', 'bookings', 'rooms', ['room_id'], ['id'], onupdate='CASCADE', ondelete='RESTRICT') + op.drop_index(op.f('ix_bookings_id'), table_name='bookings') + op.drop_index(op.f('ix_bookings_booking_number'), table_name='bookings') + op.create_index('bookings_user_id', 'bookings', ['user_id'], unique=False) + op.create_index('bookings_status', 'bookings', ['status'], unique=False) + op.create_index('bookings_room_id', 'bookings', ['room_id'], unique=False) + op.create_index('bookings_check_out_date', 'bookings', ['check_out_date'], unique=False) + op.create_index('bookings_check_in_date', 'bookings', ['check_in_date'], unique=False) + op.create_index('bookings_booking_number', 'bookings', ['booking_number'], unique=False) + op.create_index('booking_number', 'bookings', ['booking_number'], unique=False) + op.drop_index(op.f('ix_banners_id'), table_name='banners') + op.create_index('banners_position', 'banners', ['position'], unique=False) + op.create_index('banners_is_active', 'banners', ['is_active'], unique=False) + op.create_table('SequelizeMeta', + sa.Column('name', mysql.VARCHAR(collation='utf8mb3_unicode_ci', length=255), nullable=False), + sa.PrimaryKeyConstraint('name'), + mysql_collate='utf8mb3_unicode_ci', + mysql_default_charset='utf8mb3', + mysql_engine='InnoDB' + ) + op.create_index('name', 'SequelizeMeta', ['name'], unique=False) + op.drop_index(op.f('ix_audit_logs_user_id'), table_name='audit_logs') + op.drop_index(op.f('ix_audit_logs_resource_type'), table_name='audit_logs') + op.drop_index(op.f('ix_audit_logs_resource_id'), table_name='audit_logs') + op.drop_index(op.f('ix_audit_logs_request_id'), table_name='audit_logs') + op.drop_index(op.f('ix_audit_logs_id'), table_name='audit_logs') + op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs') + op.drop_index(op.f('ix_audit_logs_action'), table_name='audit_logs') + op.drop_table('audit_logs') + # ### end Alembic commands ### + diff --git a/Backend/alembic/versions/__pycache__/59baf2338f8a_initial_migration_create_all_tables_.cpython-312.pyc b/Backend/alembic/versions/__pycache__/59baf2338f8a_initial_migration_create_all_tables_.cpython-312.pyc new file mode 100644 index 00000000..1cf866f5 Binary files /dev/null and b/Backend/alembic/versions/__pycache__/59baf2338f8a_initial_migration_create_all_tables_.cpython-312.pyc differ diff --git a/Backend/requirements.txt b/Backend/requirements.txt index 09351da6..0b60cf4d 100644 --- a/Backend/requirements.txt +++ b/Backend/requirements.txt @@ -17,3 +17,8 @@ aiosmtplib==3.0.1 jinja2==3.1.2 alembic==1.12.1 +# Enterprise features (optional but recommended) +# redis==5.0.1 # Uncomment if using Redis caching +# prometheus-client==0.19.0 # Uncomment for Prometheus metrics +# sentry-sdk==1.38.0 # Uncomment for Sentry error tracking + diff --git a/Backend/reset_user_passwords.py b/Backend/reset_user_passwords.py new file mode 100644 index 00000000..c317d3f2 --- /dev/null +++ b/Backend/reset_user_passwords.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python3 +""" +Script to reset passwords for test users +""" + +import sys +import os +import bcrypt + +# Add the src directory to the path +sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'src')) + +from sqlalchemy.orm import Session +from src.config.database import SessionLocal +from src.models.user import User +from src.config.logging_config import 
setup_logging + +logger = setup_logging() + + +def hash_password(password: str) -> str: + """Hash password using bcrypt""" + password_bytes = password.encode('utf-8') + salt = bcrypt.gensalt() + hashed = bcrypt.hashpw(password_bytes, salt) + return hashed.decode('utf-8') + + +def reset_password(db: Session, email: str, new_password: str) -> bool: + """Reset password for a user""" + user = db.query(User).filter(User.email == email).first() + + if not user: + print(f"❌ User with email '{email}' not found") + return False + + # Hash new password + hashed_password = hash_password(new_password) + + # Update password + user.password = hashed_password + db.commit() + db.refresh(user) + + print(f"✅ Password reset for {email}") + print(f" New password: {new_password}") + print(f" Hash length: {len(user.password)} characters") + print() + + return True + + +def main(): + """Reset passwords for all test users""" + db = SessionLocal() + + try: + print("="*80) + print("RESETTING TEST USER PASSWORDS") + print("="*80) + print() + + test_users = [ + {"email": "admin@hotel.com", "password": "admin123"}, + {"email": "staff@hotel.com", "password": "staff123"}, + {"email": "customer@hotel.com", "password": "customer123"}, + ] + + for user_data in test_users: + reset_password(db, user_data["email"], user_data["password"]) + + print("="*80) + print("SUMMARY") + print("="*80) + print("All test user passwords have been reset.") + print("\nYou can now login with:") + for user_data in test_users: + print(f" {user_data['email']:<25} Password: {user_data['password']}") + print() + + except Exception as e: + logger.error(f"Error: {e}", exc_info=True) + print(f"\n❌ Error: {e}") + db.rollback() + finally: + db.close() + + +if __name__ == "__main__": + main() + diff --git a/Backend/run.py b/Backend/run.py index 22854924..77a5dd1e 100644 --- a/Backend/run.py +++ b/Backend/run.py @@ -3,21 +3,46 @@ Main entry point for the FastAPI server """ import uvicorn -import os -from dotenv import load_dotenv +from src.config.settings import settings +from src.config.logging_config import setup_logging, get_logger -load_dotenv() +# Setup logging +setup_logging() +logger = get_logger(__name__) if __name__ == "__main__": - port = int(os.getenv("PORT", 8000)) - host = os.getenv("HOST", "0.0.0.0") - reload = os.getenv("NODE_ENV") == "development" + logger.info(f"Starting {settings.APP_NAME} on {settings.HOST}:{settings.PORT}") + + import os + from pathlib import Path + + # Only watch the src directory to avoid watching logs, uploads, etc. 
+    base_dir = Path(__file__).parent
+    src_dir = str(base_dir / "src")
+
+    # Temporarily disable reload to stop constant "1 change detected" messages
+    # The file watcher is detecting changes that cause a loop
+    # TODO: Investigate what's causing constant file changes
+    use_reload = False  # Disabled until we identify the source of constant changes
     uvicorn.run(
         "src.main:app",
-        host=host,
-        port=port,
-        reload=reload,
-        log_level="info"
+        host=settings.HOST,
+        port=settings.PORT,
+        reload=use_reload,
+        log_level=settings.LOG_LEVEL.lower(),
+        reload_dirs=[src_dir] if use_reload else None,
+        reload_excludes=[
+            "*.log",
+            "*.pyc",
+            "*.pyo",
+            "*.pyd",
+            "__pycache__",
+            "**/__pycache__/**",
+            "*.db",
+            "*.sqlite",
+            "*.sqlite3"
+        ],
+        reload_delay=1.0  # Increase delay to reduce false positives
     )
diff --git a/Backend/src/__pycache__/main.cpython-312.pyc b/Backend/src/__pycache__/main.cpython-312.pyc
index 719362bc..8a1eb58b 100644
Binary files a/Backend/src/__pycache__/main.cpython-312.pyc and b/Backend/src/__pycache__/main.cpython-312.pyc differ
diff --git a/Backend/src/config/__pycache__/database.cpython-312.pyc b/Backend/src/config/__pycache__/database.cpython-312.pyc
index 0889c1c7..6f7e73ee 100644
Binary files a/Backend/src/config/__pycache__/database.cpython-312.pyc and b/Backend/src/config/__pycache__/database.cpython-312.pyc differ
diff --git a/Backend/src/config/__pycache__/logging_config.cpython-312.pyc b/Backend/src/config/__pycache__/logging_config.cpython-312.pyc
new file mode 100644
index 00000000..286c8eb7
Binary files /dev/null and b/Backend/src/config/__pycache__/logging_config.cpython-312.pyc differ
diff --git a/Backend/src/config/__pycache__/settings.cpython-312.pyc b/Backend/src/config/__pycache__/settings.cpython-312.pyc
new file mode 100644
index 00000000..a44e4b31
Binary files /dev/null and b/Backend/src/config/__pycache__/settings.cpython-312.pyc differ
diff --git a/Backend/src/config/database.py b/Backend/src/config/database.py
index ba46dec3..c0d64367 100644
--- a/Backend/src/config/database.py
+++ b/Backend/src/config/database.py
@@ -1,38 +1,63 @@
-from sqlalchemy import create_engine
+from sqlalchemy import create_engine, event
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import sessionmaker
-import os
-from dotenv import load_dotenv
+from sqlalchemy.pool import QueuePool
+from .settings import settings
+from .logging_config import get_logger
 
-load_dotenv()
+logger = get_logger(__name__)
 
-# Database configuration
-DB_USER = os.getenv("DB_USER", "root")
-DB_PASS = os.getenv("DB_PASS", "")
-DB_NAME = os.getenv("DB_NAME", "hotel_db")
-DB_HOST = os.getenv("DB_HOST", "localhost")
-DB_PORT = os.getenv("DB_PORT", "3306")
-
-DATABASE_URL = f"mysql+pymysql://{DB_USER}:{DB_PASS}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
+# Database configuration using settings
+DATABASE_URL = settings.database_url
 
+# Enhanced engine configuration for enterprise use
 engine = create_engine(
     DATABASE_URL,
-    pool_pre_ping=True,
-    pool_recycle=300,
-    pool_size=5,
-    max_overflow=10,
-    echo=os.getenv("NODE_ENV") == "development"
+    poolclass=QueuePool,
+    pool_pre_ping=True,  # Verify connections before using
+    pool_recycle=3600,  # Recycle connections after 1 hour
+    pool_size=10,  # Number of connections to maintain
+    max_overflow=20,  # Additional connections beyond pool_size
+    echo=settings.is_development,  # Log SQL queries in development
+    future=True,  # Use SQLAlchemy 2.0 style
+    connect_args={
+        "charset": "utf8mb4",
+        "connect_timeout": 10
+    }
 )
 
+# Event listeners for connection pool monitoring
+@event.listens_for(engine, "connect")
+def receive_connect(dbapi_conn, connection_record):
+    """Log new database connections"""
+    logger.debug("New database connection established")
+
+@event.listens_for(engine, "checkout")
+def receive_checkout(dbapi_conn, connection_record, connection_proxy):
+    """Log connection checkout"""
+    logger.debug("Connection checked out from pool")
+
+@event.listens_for(engine, "checkin")
+def receive_checkin(dbapi_conn, connection_record):
+    """Log connection checkin"""
+    logger.debug("Connection returned to pool")
+
 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
 Base = declarative_base()
 
 # Dependency to get DB session
 def get_db():
+    """
+    Dependency for getting database session.
+    Automatically handles session lifecycle.
+    """
     db = SessionLocal()
     try:
         yield db
+    except Exception:
+        db.rollback()
+        raise
     finally:
         db.close()
diff --git a/Backend/src/config/logging_config.py b/Backend/src/config/logging_config.py
new file mode 100644
index 00000000..89f585ba
--- /dev/null
+++ b/Backend/src/config/logging_config.py
@@ -0,0 +1,96 @@
+"""
+Enterprise-grade structured logging configuration
+"""
+import logging
+import sys
+from logging.handlers import RotatingFileHandler
+from pathlib import Path
+from typing import Optional
+from .settings import settings
+
+
+def setup_logging(
+    log_level: Optional[str] = None,
+    log_file: Optional[str] = None,
+    enable_file_logging: bool = True
+) -> logging.Logger:
+    """
+    Setup structured logging with file and console handlers
+
+    Args:
+        log_level: Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
+        log_file: Path to log file
+        enable_file_logging: Whether to enable file logging
+
+    Returns:
+        Configured root logger
+    """
+    # Get configuration from settings
+    level = log_level or settings.LOG_LEVEL
+    log_file_path = log_file or settings.LOG_FILE
+
+    # Convert string level to logging constant
+    numeric_level = getattr(logging, level.upper(), logging.INFO)
+
+    # Create logs directory if it doesn't exist
+    if enable_file_logging and log_file_path:
+        log_path = Path(log_file_path)
+        log_path.parent.mkdir(parents=True, exist_ok=True)
+
+    # Create formatter with structured format
+    detailed_formatter = logging.Formatter(
+        fmt='%(asctime)s | %(levelname)-8s | %(name)s | %(funcName)s:%(lineno)d | %(message)s',
+        datefmt='%Y-%m-%d %H:%M:%S'
+    )
+
+    simple_formatter = logging.Formatter(
+        fmt='%(asctime)s | %(levelname)-8s | %(message)s',
+        datefmt='%H:%M:%S'
+    )
+
+    # Configure root logger
+    root_logger = logging.getLogger()
+    root_logger.setLevel(numeric_level)
+
+    # Remove existing handlers
+    root_logger.handlers.clear()
+
+    # Console handler (always enabled)
+    console_handler = logging.StreamHandler(sys.stdout)
+    console_handler.setLevel(numeric_level)
+    console_handler.setFormatter(simple_formatter if settings.is_development else detailed_formatter)
+    root_logger.addHandler(console_handler)
+
+    # File handler (rotating) - Disabled in development to avoid file watcher issues
+    if enable_file_logging and log_file_path and not settings.is_development:
+        file_handler = RotatingFileHandler(
+            log_file_path,
+            maxBytes=settings.LOG_MAX_BYTES,
+            backupCount=settings.LOG_BACKUP_COUNT,
+            encoding='utf-8'
+        )
+        file_handler.setLevel(numeric_level)
+        file_handler.setFormatter(detailed_formatter)
+        root_logger.addHandler(file_handler)
+
+    # Set levels for third-party loggers
+    logging.getLogger("uvicorn").setLevel(logging.INFO)
+    logging.getLogger("uvicorn.access").setLevel(logging.INFO if
settings.is_development else logging.WARNING) + logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING) + logging.getLogger("slowapi").setLevel(logging.WARNING) + + return root_logger + + +def get_logger(name: str) -> logging.Logger: + """ + Get a logger instance with the given name + + Args: + name: Logger name (typically __name__) + + Returns: + Logger instance + """ + return logging.getLogger(name) + diff --git a/Backend/src/config/settings.py b/Backend/src/config/settings.py new file mode 100644 index 00000000..2881b6ab --- /dev/null +++ b/Backend/src/config/settings.py @@ -0,0 +1,119 @@ +""" +Enterprise-grade configuration management using Pydantic Settings +""" +from pydantic_settings import BaseSettings, SettingsConfigDict +from pydantic import Field +from typing import List +import os + + +class Settings(BaseSettings): + """Application settings with environment variable support""" + + model_config = SettingsConfigDict( + env_file=".env", + env_file_encoding="utf-8", + case_sensitive=False, + extra="ignore" + ) + + # Application + APP_NAME: str = Field(default="Hotel Booking API", description="Application name") + APP_VERSION: str = Field(default="1.0.0", description="Application version") + ENVIRONMENT: str = Field(default="development", description="Environment: development, staging, production") + DEBUG: bool = Field(default=False, description="Debug mode") + API_V1_PREFIX: str = Field(default="/api/v1", description="API v1 prefix") + + # Server + HOST: str = Field(default="0.0.0.0", description="Server host") + PORT: int = Field(default=8000, description="Server port") + + # Database + DB_USER: str = Field(default="root", description="Database user") + DB_PASS: str = Field(default="", description="Database password") + DB_NAME: str = Field(default="hotel_db", description="Database name") + DB_HOST: str = Field(default="localhost", description="Database host") + DB_PORT: str = Field(default="3306", description="Database port") + + # Security + JWT_SECRET: str = Field(default="dev-secret-key-change-in-production-12345", description="JWT secret key") + JWT_ALGORITHM: str = Field(default="HS256", description="JWT algorithm") + JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = Field(default=30, description="JWT access token expiration in minutes") + JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = Field(default=7, description="JWT refresh token expiration in days") + + # CORS + CLIENT_URL: str = Field(default="http://localhost:5173", description="Frontend client URL") + CORS_ORIGINS: List[str] = Field( + default_factory=lambda: [ + "http://localhost:5173", + "http://localhost:3000", + "http://127.0.0.1:5173" + ], + description="Allowed CORS origins" + ) + + # Rate Limiting + RATE_LIMIT_ENABLED: bool = Field(default=True, description="Enable rate limiting") + RATE_LIMIT_PER_MINUTE: int = Field(default=60, description="Requests per minute per IP") + + # Logging + LOG_LEVEL: str = Field(default="INFO", description="Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL") + LOG_FILE: str = Field(default="logs/app.log", description="Log file path") + LOG_MAX_BYTES: int = Field(default=10485760, description="Max log file size (10MB)") + LOG_BACKUP_COUNT: int = Field(default=5, description="Number of backup log files") + + # Email + SMTP_HOST: str = Field(default="smtp.gmail.com", description="SMTP host") + SMTP_PORT: int = Field(default=587, description="SMTP port") + SMTP_USER: str = Field(default="", description="SMTP username") + SMTP_PASSWORD: str = Field(default="", description="SMTP password") + 
SMTP_FROM_EMAIL: str = Field(default="", description="From email address") + SMTP_FROM_NAME: str = Field(default="Hotel Booking", description="From name") + + # File Upload + UPLOAD_DIR: str = Field(default="uploads", description="Upload directory") + MAX_UPLOAD_SIZE: int = Field(default=5242880, description="Max upload size in bytes (5MB)") + ALLOWED_EXTENSIONS: List[str] = Field( + default_factory=lambda: ["jpg", "jpeg", "png", "gif", "webp"], + description="Allowed file extensions" + ) + + # Redis (for caching) + REDIS_ENABLED: bool = Field(default=False, description="Enable Redis caching") + REDIS_HOST: str = Field(default="localhost", description="Redis host") + REDIS_PORT: int = Field(default=6379, description="Redis port") + REDIS_DB: int = Field(default=0, description="Redis database number") + REDIS_PASSWORD: str = Field(default="", description="Redis password") + + # Request Timeout + REQUEST_TIMEOUT: int = Field(default=30, description="Request timeout in seconds") + + # Health Check + HEALTH_CHECK_INTERVAL: int = Field(default=30, description="Health check interval in seconds") + + @property + def database_url(self) -> str: + """Construct database URL""" + return f"mysql+pymysql://{self.DB_USER}:{self.DB_PASS}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_NAME}" + + @property + def is_production(self) -> bool: + """Check if running in production""" + return self.ENVIRONMENT.lower() == "production" + + @property + def is_development(self) -> bool: + """Check if running in development""" + return self.ENVIRONMENT.lower() == "development" + + @property + def redis_url(self) -> str: + """Construct Redis URL""" + if self.REDIS_PASSWORD: + return f"redis://:{self.REDIS_PASSWORD}@{self.REDIS_HOST}:{self.REDIS_PORT}/{self.REDIS_DB}" + return f"redis://{self.REDIS_HOST}:{self.REDIS_PORT}/{self.REDIS_DB}" + + +# Global settings instance +settings = Settings() + diff --git a/Backend/src/main.py b/Backend/src/main.py index 6e3d2cc2..ce48c970 100644 --- a/Backend/src/main.py +++ b/Backend/src/main.py @@ -1,17 +1,30 @@ -from fastapi import FastAPI, Request, HTTPException +from fastapi import FastAPI, Request, HTTPException, Depends, status from fastapi.middleware.cors import CORSMiddleware from fastapi.staticfiles import StaticFiles from fastapi.responses import JSONResponse from fastapi.exceptions import RequestValidationError -from sqlalchemy.exc import IntegrityError +from sqlalchemy.exc import IntegrityError, OperationalError from jose.exceptions import JWTError from slowapi import Limiter, _rate_limit_exceeded_handler from slowapi.util import get_remote_address from slowapi.errors import RateLimitExceeded -import os from pathlib import Path +from datetime import datetime +import sys -from .config.database import engine, Base +# Import configuration and logging FIRST +from .config.settings import settings +from .config.logging_config import setup_logging, get_logger +from .config.database import engine, Base, get_db +from . 
import models # noqa: F401 - ensure models are imported so tables are created +from sqlalchemy.orm import Session + +# Setup logging before anything else +logger = setup_logging() + +logger.info(f"Starting {settings.APP_NAME} v{settings.APP_VERSION} in {settings.ENVIRONMENT} mode") + +# Import middleware from .middleware.error_handler import ( validation_exception_handler, integrity_error_handler, @@ -19,38 +32,65 @@ from .middleware.error_handler import ( http_exception_handler, general_exception_handler ) -# Create database tables -Base.metadata.create_all(bind=engine) +from .middleware.request_id import RequestIDMiddleware +from .middleware.security import SecurityHeadersMiddleware +from .middleware.timeout import TimeoutMiddleware +from .middleware.cookie_consent import CookieConsentMiddleware + +# Create database tables (for development, migrations should be used in production) +if settings.is_development: + logger.info("Creating database tables (development mode)") + Base.metadata.create_all(bind=engine) +else: + # Ensure new cookie-related tables exist even if full migrations haven't been run yet. + try: + from .models.cookie_policy import CookiePolicy + from .models.cookie_integration_config import CookieIntegrationConfig + logger.info("Ensuring cookie-related tables exist") + CookiePolicy.__table__.create(bind=engine, checkfirst=True) + CookieIntegrationConfig.__table__.create(bind=engine, checkfirst=True) + except Exception as e: + logger.error(f"Failed to ensure cookie tables exist: {e}") from .routes import auth_routes +from .routes import privacy_routes # Initialize FastAPI app app = FastAPI( - title="Hotel Booking API", - description="Hotel booking backend API", - version="1.0.0" + title=settings.APP_NAME, + description="Enterprise-grade Hotel Booking API", + version=settings.APP_VERSION, + docs_url="/api/docs" if not settings.is_production else None, + redoc_url="/api/redoc" if not settings.is_production else None, + openapi_url="/api/openapi.json" if not settings.is_production else None ) +# Add middleware in order (order matters!) +# 1. Request ID middleware (first to add request ID) +app.add_middleware(RequestIDMiddleware) + +# 2. Cookie consent middleware (makes consent available on request.state) +app.add_middleware(CookieConsentMiddleware) + +# 3. Timeout middleware +if settings.REQUEST_TIMEOUT > 0: + app.add_middleware(TimeoutMiddleware) + +# 4. 
Security headers middleware +app.add_middleware(SecurityHeadersMiddleware) + # Rate limiting -limiter = Limiter(key_func=get_remote_address) -app.state.limiter = limiter -app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler) +if settings.RATE_LIMIT_ENABLED: + limiter = Limiter( + key_func=get_remote_address, + default_limits=[f"{settings.RATE_LIMIT_PER_MINUTE}/minute"] + ) + app.state.limiter = limiter + app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler) + logger.info(f"Rate limiting enabled: {settings.RATE_LIMIT_PER_MINUTE} requests/minute") # CORS configuration -# Allow multiple origins for development -client_url = os.getenv("CLIENT_URL", "http://localhost:5173") -allowed_origins = [ - client_url, - "http://localhost:5173", # Vite default - "http://localhost:3000", # Alternative port - "http://localhost:5174", # Vite alternative - "http://127.0.0.1:5173", - "http://127.0.0.1:3000", - "http://127.0.0.1:5174", -] - -# In development, allow all localhost origins using regex -if os.getenv("ENVIRONMENT", "development") == "development": +if settings.is_development: # For development, use regex to allow any localhost port app.add_middleware( CORSMiddleware, @@ -59,18 +99,20 @@ if os.getenv("ENVIRONMENT", "development") == "development": allow_methods=["*"], allow_headers=["*"], ) + logger.info("CORS configured for development (allowing localhost)") else: # Production: use specific origins app.add_middleware( CORSMiddleware, - allow_origins=allowed_origins, + allow_origins=settings.CORS_ORIGINS, allow_credentials=True, - allow_methods=["*"], + allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"], allow_headers=["*"], ) + logger.info(f"CORS configured for production with {len(settings.CORS_ORIGINS)} allowed origins") # Serve static files (uploads) -uploads_dir = Path(__file__).parent.parent / "uploads" +uploads_dir = Path(__file__).parent.parent / settings.UPLOAD_DIR uploads_dir.mkdir(exist_ok=True) app.mount("/uploads", StaticFiles(directory=str(uploads_dir)), name="uploads") @@ -81,25 +123,82 @@ app.add_exception_handler(IntegrityError, integrity_error_handler) app.add_exception_handler(JWTError, jwt_error_handler) app.add_exception_handler(Exception, general_exception_handler) -# Health check -@app.get("/health") -async def health_check(): +# Enhanced Health check with database connectivity +@app.get("/health", tags=["health"]) +async def health_check(db: Session = Depends(get_db)): + """ + Enhanced health check endpoint with database connectivity test + """ + health_status = { + "status": "healthy", + "timestamp": datetime.utcnow().isoformat(), + "service": settings.APP_NAME, + "version": settings.APP_VERSION, + "environment": settings.ENVIRONMENT, + "checks": { + "api": "ok", + "database": "unknown" + } + } + + # Check database connectivity + try: + from sqlalchemy import text + db.execute(text("SELECT 1")) + health_status["checks"]["database"] = "ok" + except OperationalError as e: + health_status["status"] = "unhealthy" + health_status["checks"]["database"] = "error" + health_status["error"] = str(e) + logger.error(f"Database health check failed: {str(e)}") + return JSONResponse( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + content=health_status + ) + except Exception as e: + health_status["status"] = "unhealthy" + health_status["checks"]["database"] = "error" + health_status["error"] = str(e) + logger.error(f"Health check failed: {str(e)}") + return JSONResponse( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + 
content=health_status + ) + + return health_status + + +# Metrics endpoint (basic) +@app.get("/metrics", tags=["monitoring"]) +async def metrics(): + """ + Basic metrics endpoint (can be extended with Prometheus or similar) + """ return { "status": "success", - "message": "Server is running", - "timestamp": __import__("datetime").datetime.utcnow().isoformat() + "service": settings.APP_NAME, + "version": settings.APP_VERSION, + "environment": settings.ENVIRONMENT, + "timestamp": datetime.utcnow().isoformat() } -# API Routes +# API Routes with versioning +# Legacy routes (maintain backward compatibility) app.include_router(auth_routes.router, prefix="/api") +app.include_router(privacy_routes.router, prefix="/api") + +# Versioned API routes (v1) +app.include_router(auth_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(privacy_routes.router, prefix=settings.API_V1_PREFIX) # Import and include other routes from .routes import ( room_routes, booking_routes, payment_routes, banner_routes, favorite_routes, service_routes, promotion_routes, report_routes, - review_routes, user_routes + review_routes, user_routes, audit_routes, admin_privacy_routes ) +# Legacy routes (maintain backward compatibility) app.include_router(room_routes.router, prefix="/api") app.include_router(booking_routes.router, prefix="/api") app.include_router(payment_routes.router, prefix="/api") @@ -110,12 +209,66 @@ app.include_router(promotion_routes.router, prefix="/api") app.include_router(report_routes.router, prefix="/api") app.include_router(review_routes.router, prefix="/api") app.include_router(user_routes.router, prefix="/api") +app.include_router(audit_routes.router, prefix="/api") +app.include_router(admin_privacy_routes.router, prefix="/api") -# Note: FastAPI automatically handles 404s for unmatched routes -# This handler is kept for custom 404 responses but may not be needed +# Versioned routes (v1) +app.include_router(room_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(booking_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(payment_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(banner_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(favorite_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(service_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(promotion_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(report_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(review_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(user_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(audit_routes.router, prefix=settings.API_V1_PREFIX) +app.include_router(admin_privacy_routes.router, prefix=settings.API_V1_PREFIX) + +logger.info("All routes registered successfully") + +# Startup event +@app.on_event("startup") +async def startup_event(): + """Run on application startup""" + logger.info(f"{settings.APP_NAME} started successfully") + logger.info(f"Environment: {settings.ENVIRONMENT}") + logger.info(f"Debug mode: {settings.DEBUG}") + logger.info(f"API version: {settings.API_V1_PREFIX}") + +# Shutdown event +@app.on_event("shutdown") +async def shutdown_event(): + """Run on application shutdown""" + logger.info(f"{settings.APP_NAME} shutting down gracefully") if __name__ == "__main__": import uvicorn - port = int(os.getenv("PORT", 3000)) - uvicorn.run("main:app", host="0.0.0.0", port=port, reload=True) + from pathlib import Path + + # Only watch the 
src directory to avoid watching logs, uploads, etc. + base_dir = Path(__file__).parent.parent + src_dir = str(base_dir / "src") + + uvicorn.run( + "src.main:app", + host=settings.HOST, + port=settings.PORT, + reload=settings.is_development, + log_level=settings.LOG_LEVEL.lower(), + reload_dirs=[src_dir] if settings.is_development else None, + reload_excludes=[ + "*.log", + "*.pyc", + "*.pyo", + "*.pyd", + "__pycache__", + "**/__pycache__/**", + "*.db", + "*.sqlite", + "*.sqlite3" + ], + reload_delay=0.5 # Increase delay to reduce false positives + ) diff --git a/Backend/src/middleware/__pycache__/auth.cpython-312.pyc b/Backend/src/middleware/__pycache__/auth.cpython-312.pyc index 69f3ec2d..045b62bc 100644 Binary files a/Backend/src/middleware/__pycache__/auth.cpython-312.pyc and b/Backend/src/middleware/__pycache__/auth.cpython-312.pyc differ diff --git a/Backend/src/middleware/__pycache__/cookie_consent.cpython-312.pyc b/Backend/src/middleware/__pycache__/cookie_consent.cpython-312.pyc new file mode 100644 index 00000000..12cfee5c Binary files /dev/null and b/Backend/src/middleware/__pycache__/cookie_consent.cpython-312.pyc differ diff --git a/Backend/src/middleware/__pycache__/error_handler.cpython-312.pyc b/Backend/src/middleware/__pycache__/error_handler.cpython-312.pyc index 1775f1b9..93da6889 100644 Binary files a/Backend/src/middleware/__pycache__/error_handler.cpython-312.pyc and b/Backend/src/middleware/__pycache__/error_handler.cpython-312.pyc differ diff --git a/Backend/src/middleware/__pycache__/request_id.cpython-312.pyc b/Backend/src/middleware/__pycache__/request_id.cpython-312.pyc new file mode 100644 index 00000000..ea615492 Binary files /dev/null and b/Backend/src/middleware/__pycache__/request_id.cpython-312.pyc differ diff --git a/Backend/src/middleware/__pycache__/security.cpython-312.pyc b/Backend/src/middleware/__pycache__/security.cpython-312.pyc new file mode 100644 index 00000000..a4e0fb76 Binary files /dev/null and b/Backend/src/middleware/__pycache__/security.cpython-312.pyc differ diff --git a/Backend/src/middleware/__pycache__/timeout.cpython-312.pyc b/Backend/src/middleware/__pycache__/timeout.cpython-312.pyc new file mode 100644 index 00000000..12ef31a5 Binary files /dev/null and b/Backend/src/middleware/__pycache__/timeout.cpython-312.pyc differ diff --git a/Backend/src/middleware/auth.py b/Backend/src/middleware/auth.py index 07360e02..16839adf 100644 --- a/Backend/src/middleware/auth.py +++ b/Backend/src/middleware/auth.py @@ -6,6 +6,7 @@ from typing import Optional import os from ..config.database import get_db +from ..config.settings import settings from ..models.user import User security = HTTPBearer() @@ -26,7 +27,8 @@ def get_current_user( ) try: - payload = jwt.decode(token, os.getenv("JWT_SECRET"), algorithms=["HS256"]) + jwt_secret = getattr(settings, 'JWT_SECRET', None) or os.getenv("JWT_SECRET", "dev-secret-key-change-in-production-12345") + payload = jwt.decode(token, jwt_secret, algorithms=["HS256"]) user_id: int = payload.get("userId") if user_id is None: raise credentials_exception diff --git a/Backend/src/middleware/cookie_consent.py b/Backend/src/middleware/cookie_consent.py new file mode 100644 index 00000000..a93f6b6e --- /dev/null +++ b/Backend/src/middleware/cookie_consent.py @@ -0,0 +1,89 @@ +import json +from typing import Callable, Awaitable + +from fastapi import Request, Response +from starlette.middleware.base import BaseHTTPMiddleware + +from ..schemas.privacy import CookieConsent, CookieCategoryPreferences +from 
..config.settings import settings +from ..config.logging_config import get_logger + + +logger = get_logger(__name__) + + +COOKIE_CONSENT_COOKIE_NAME = "cookieConsent" + + +def _parse_consent_cookie(raw_value: str | None) -> CookieConsent: + if not raw_value: + return CookieConsent() # Defaults: only necessary = True + + try: + data = json.loads(raw_value) + # Pydantic will validate and coerce as needed + return CookieConsent(**data) + except Exception as exc: # pragma: no cover - defensive + logger.warning(f"Failed to parse cookie consent cookie: {exc}") + return CookieConsent() + + +class CookieConsentMiddleware(BaseHTTPMiddleware): + """ + Middleware that parses the cookie consent cookie (if present) and attaches it + to `request.state.cookie_consent` for downstream handlers. + """ + + async def dispatch( + self, request: Request, call_next: Callable[[Request], Awaitable[Response]] + ) -> Response: + raw_cookie = request.cookies.get(COOKIE_CONSENT_COOKIE_NAME) + consent = _parse_consent_cookie(raw_cookie) + + # Ensure 'necessary' is always true regardless of stored value + consent.categories.necessary = True + + request.state.cookie_consent = consent + + response = await call_next(request) + + # If there's no cookie yet, set a minimal default consent cookie + # so that the banner can be rendered based on server-side knowledge. + if COOKIE_CONSENT_COOKIE_NAME not in request.cookies: + try: + response.set_cookie( + key=COOKIE_CONSENT_COOKIE_NAME, + value=consent.model_dump_json(), + httponly=True, + secure=settings.is_production, + samesite="lax", + max_age=365 * 24 * 60 * 60, # 1 year + path="/", + ) + except Exception as exc: # pragma: no cover - defensive + logger.warning(f"Failed to set default cookie consent cookie: {exc}") + + return response + + +def is_analytics_allowed(request: Request) -> bool: + consent: CookieConsent | None = getattr(request.state, "cookie_consent", None) + if not consent: + return False + return consent.categories.analytics + + +def is_marketing_allowed(request: Request) -> bool: + consent: CookieConsent | None = getattr(request.state, "cookie_consent", None) + if not consent: + return False + return consent.categories.marketing + + +def is_preferences_allowed(request: Request) -> bool: + consent: CookieConsent | None = getattr(request.state, "cookie_consent", None) + if not consent: + return False + return consent.categories.preferences + + diff --git a/Backend/src/middleware/error_handler.py b/Backend/src/middleware/error_handler.py index 0cafedd0..41fa3be9 100644 --- a/Backend/src/middleware/error_handler.py +++ b/Backend/src/middleware/error_handler.py @@ -3,7 +3,6 @@ from fastapi.responses import JSONResponse from fastapi.exceptions import RequestValidationError from sqlalchemy.exc import IntegrityError from jose.exceptions import JWTError -import os import traceback @@ -96,10 +95,23 @@ async def general_exception_handler(request: Request, exc: Exception): """ Handle all other exceptions """ - # Log error - print(f"Error: {exc}") - if os.getenv("NODE_ENV") == "development": - traceback.print_exc() + from ..config.logging_config import get_logger + from ..config.settings import settings + + logger = get_logger(__name__) + request_id = getattr(request.state, "request_id", None) + + # Log error with context + logger.error( + f"Unhandled exception: {type(exc).__name__}: {str(exc)}", + extra={ + "request_id": request_id, + "path": request.url.path, + "method": request.method, + "exception_type": type(exc).__name__ + }, + exc_info=True + ) # Handle 
HTTPException with dict detail if isinstance(exc, Exception) and hasattr(exc, "status_code"): @@ -116,12 +128,17 @@ async def general_exception_handler(request: Request, exc: Exception): status_code = status.HTTP_500_INTERNAL_SERVER_ERROR message = str(exc) if str(exc) else "Internal server error" + response_content = { + "status": "error", + "message": message + } + + # Add stack trace in development + if settings.is_development: + response_content["stack"] = traceback.format_exc() + return JSONResponse( status_code=status_code, - content={ - "status": "error", - "message": message, - **({"stack": traceback.format_exc()} if os.getenv("NODE_ENV") == "development" else {}) - } + content=response_content ) diff --git a/Backend/src/middleware/request_id.py b/Backend/src/middleware/request_id.py new file mode 100644 index 00000000..6fe0732c --- /dev/null +++ b/Backend/src/middleware/request_id.py @@ -0,0 +1,65 @@ +""" +Request ID middleware for tracking requests across services +""" +import uuid +from fastapi import Request +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.responses import Response +from ..config.logging_config import get_logger + +logger = get_logger(__name__) + + +class RequestIDMiddleware(BaseHTTPMiddleware): + """Add unique request ID to each request for tracing""" + + async def dispatch(self, request: Request, call_next): + # Generate or get request ID + request_id = request.headers.get("X-Request-ID") or str(uuid.uuid4()) + + # Add request ID to request state + request.state.request_id = request_id + + # Log request + logger.info( + f"Request started: {request.method} {request.url.path}", + extra={ + "request_id": request_id, + "method": request.method, + "path": request.url.path, + "client_ip": request.client.host if request.client else None + } + ) + + # Process request + try: + response = await call_next(request) + + # Add request ID to response headers + response.headers["X-Request-ID"] = request_id + + # Log response + logger.info( + f"Request completed: {request.method} {request.url.path} - {response.status_code}", + extra={ + "request_id": request_id, + "method": request.method, + "path": request.url.path, + "status_code": response.status_code + } + ) + + return response + except Exception as e: + logger.error( + f"Request failed: {request.method} {request.url.path} - {str(e)}", + extra={ + "request_id": request_id, + "method": request.method, + "path": request.url.path, + "error": str(e) + }, + exc_info=True + ) + raise + diff --git a/Backend/src/middleware/security.py b/Backend/src/middleware/security.py new file mode 100644 index 00000000..e575fd31 --- /dev/null +++ b/Backend/src/middleware/security.py @@ -0,0 +1,57 @@ +""" +Security middleware for adding security headers +""" +from fastapi import Request +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.responses import Response +from ..config.logging_config import get_logger +from ..config.settings import settings + +logger = get_logger(__name__) + + +class SecurityHeadersMiddleware(BaseHTTPMiddleware): + """Add security headers to all responses""" + + async def dispatch(self, request: Request, call_next): + response = await call_next(request) + + # Security headers + security_headers = { + "X-Content-Type-Options": "nosniff", + "X-Frame-Options": "DENY", + "X-XSS-Protection": "1; mode=block", + "Referrer-Policy": "strict-origin-when-cross-origin", + "Permissions-Policy": "geolocation=(), microphone=(), camera=()", + } + + # Allow resources (like banner images) 
to be loaded cross-origin by the frontend. + # This helps avoid Firefox's OpaqueResponseBlocking when the frontend runs + # on a different origin (e.g. Vite dev server on :5173) and loads images + # from the API origin (e.g. :8000). + # + # In production you may want a stricter policy (e.g. "same-site") depending + # on your deployment topology. + security_headers.setdefault("Cross-Origin-Resource-Policy", "cross-origin") + + # Add Content-Security-Policy + if settings.is_production: + security_headers["Content-Security-Policy"] = ( + "default-src 'self'; " + "script-src 'self' 'unsafe-inline' 'unsafe-eval'; " + "style-src 'self' 'unsafe-inline'; " + "img-src 'self' data: https:; " + "font-src 'self' data:; " + "connect-src 'self'" + ) + + # Add Strict-Transport-Security in production with HTTPS + if settings.is_production: + security_headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains" + + # Apply headers + for header, value in security_headers.items(): + response.headers[header] = value + + return response + diff --git a/Backend/src/middleware/timeout.py b/Backend/src/middleware/timeout.py new file mode 100644 index 00000000..db0c5ada --- /dev/null +++ b/Backend/src/middleware/timeout.py @@ -0,0 +1,41 @@ +""" +Request timeout middleware +""" +import asyncio +from fastapi import Request, HTTPException, status +from starlette.middleware.base import BaseHTTPMiddleware +from ..config.logging_config import get_logger +from ..config.settings import settings + +logger = get_logger(__name__) + + +class TimeoutMiddleware(BaseHTTPMiddleware): + """Add timeout to requests""" + + async def dispatch(self, request: Request, call_next): + try: + # Use asyncio.wait_for to add timeout + response = await asyncio.wait_for( + call_next(request), + timeout=settings.REQUEST_TIMEOUT + ) + return response + except asyncio.TimeoutError: + logger.warning( + f"Request timeout: {request.method} {request.url.path}", + extra={ + "request_id": getattr(request.state, "request_id", None), + "method": request.method, + "path": request.url.path, + "timeout": settings.REQUEST_TIMEOUT + } + ) + raise HTTPException( + status_code=status.HTTP_504_GATEWAY_TIMEOUT, + detail={ + "status": "error", + "message": "Request timeout. Please try again." 
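The request-ID, security-headers, and timeout middleware above (together with the earlier cookie-consent middleware) only take effect once they are added to the FastAPI app. The actual registration in src/main.py is not part of this diff, so the wiring below is only a sketch of how it might look; the import paths follow the new modules, but the registration order and the absence of extra arguments are assumptions.

# Sketch only -- the real registration lives in src/main.py, which this diff does not show.
from fastapi import FastAPI

from src.middleware.cookie_consent import CookieConsentMiddleware
from src.middleware.request_id import RequestIDMiddleware
from src.middleware.security import SecurityHeadersMiddleware
from src.middleware.timeout import TimeoutMiddleware

app = FastAPI()

# Starlette runs the most recently added BaseHTTPMiddleware first on the request path,
# so adding RequestIDMiddleware last makes the request ID available to the inner layers.
app.add_middleware(TimeoutMiddleware)
app.add_middleware(SecurityHeadersMiddleware)
app.add_middleware(CookieConsentMiddleware)
app.add_middleware(RequestIDMiddleware)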
+ } + ) + diff --git a/Backend/src/models/__init__.py b/Backend/src/models/__init__.py index c1f28754..fc1e9f1c 100644 --- a/Backend/src/models/__init__.py +++ b/Backend/src/models/__init__.py @@ -13,6 +13,9 @@ from .checkin_checkout import CheckInCheckOut from .banner import Banner from .review import Review from .favorite import Favorite +from .audit_log import AuditLog +from .cookie_policy import CookiePolicy +from .cookie_integration_config import CookieIntegrationConfig __all__ = [ "Role", @@ -30,5 +33,8 @@ __all__ = [ "Banner", "Review", "Favorite", + "AuditLog", + "CookiePolicy", + "CookieIntegrationConfig", ] diff --git a/Backend/src/models/__pycache__/__init__.cpython-312.pyc b/Backend/src/models/__pycache__/__init__.cpython-312.pyc index 640e9f1f..a019c14e 100644 Binary files a/Backend/src/models/__pycache__/__init__.cpython-312.pyc and b/Backend/src/models/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/audit_log.cpython-312.pyc b/Backend/src/models/__pycache__/audit_log.cpython-312.pyc new file mode 100644 index 00000000..5bab501e Binary files /dev/null and b/Backend/src/models/__pycache__/audit_log.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/cookie_integration_config.cpython-312.pyc b/Backend/src/models/__pycache__/cookie_integration_config.cpython-312.pyc new file mode 100644 index 00000000..ec82a456 Binary files /dev/null and b/Backend/src/models/__pycache__/cookie_integration_config.cpython-312.pyc differ diff --git a/Backend/src/models/__pycache__/cookie_policy.cpython-312.pyc b/Backend/src/models/__pycache__/cookie_policy.cpython-312.pyc new file mode 100644 index 00000000..82349a37 Binary files /dev/null and b/Backend/src/models/__pycache__/cookie_policy.cpython-312.pyc differ diff --git a/Backend/src/models/audit_log.py b/Backend/src/models/audit_log.py new file mode 100644 index 00000000..b566e76a --- /dev/null +++ b/Backend/src/models/audit_log.py @@ -0,0 +1,28 @@ +""" +Audit log model for tracking important actions +""" +from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, JSON +from sqlalchemy.orm import relationship +from datetime import datetime +from ..config.database import Base + + +class AuditLog(Base): + __tablename__ = "audit_logs" + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + user_id = Column(Integer, ForeignKey("users.id"), nullable=True, index=True) + action = Column(String(100), nullable=False, index=True) # e.g., "user.created", "booking.cancelled" + resource_type = Column(String(50), nullable=False, index=True) # e.g., "user", "booking" + resource_id = Column(Integer, nullable=True, index=True) + ip_address = Column(String(45), nullable=True) # IPv6 compatible + user_agent = Column(String(255), nullable=True) + request_id = Column(String(36), nullable=True, index=True) # UUID + details = Column(JSON, nullable=True) # Additional context + status = Column(String(20), nullable=False, default="success") # success, failed, error + error_message = Column(Text, nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + + # Relationships + user = relationship("User", foreign_keys=[user_id]) + diff --git a/Backend/src/models/cookie_integration_config.py b/Backend/src/models/cookie_integration_config.py new file mode 100644 index 00000000..2d2b1a20 --- /dev/null +++ b/Backend/src/models/cookie_integration_config.py @@ -0,0 +1,30 @@ +from datetime import datetime + +from sqlalchemy import Column, DateTime, 
ForeignKey, Integer, String +from sqlalchemy.orm import relationship + +from ..config.database import Base + + +class CookieIntegrationConfig(Base): + """ + Stores IDs for well-known integrations (e.g., Google Analytics, Meta Pixel). + Does NOT allow arbitrary script injection from the dashboard. + """ + + __tablename__ = "cookie_integration_configs" + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + + ga_measurement_id = Column(String(64), nullable=True) # e.g. G-XXXXXXXXXX + fb_pixel_id = Column(String(64), nullable=True) # e.g. 1234567890 + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column( + DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False + ) + + updated_by_id = Column(Integer, ForeignKey("users.id"), nullable=True) + updated_by = relationship("User", lazy="joined") + + diff --git a/Backend/src/models/cookie_policy.py b/Backend/src/models/cookie_policy.py new file mode 100644 index 00000000..8395b0fc --- /dev/null +++ b/Backend/src/models/cookie_policy.py @@ -0,0 +1,31 @@ +from datetime import datetime + +from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer +from sqlalchemy.orm import relationship + +from ..config.database import Base + + +class CookiePolicy(Base): + """ + Global cookie policy controlled by administrators. + + This does NOT store per-user consent; it controls which cookie categories + are available to be requested from users (e.g., disable analytics entirely). + """ + + __tablename__ = "cookie_policies" + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + + analytics_enabled = Column(Boolean, default=True, nullable=False) + marketing_enabled = Column(Boolean, default=True, nullable=False) + preferences_enabled = Column(Boolean, default=True, nullable=False) + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + updated_by_id = Column(Integer, ForeignKey("users.id"), nullable=True) + updated_by = relationship("User", lazy="joined") + + diff --git a/Backend/src/routes/__pycache__/admin_privacy_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/admin_privacy_routes.cpython-312.pyc new file mode 100644 index 00000000..1d0e0dc1 Binary files /dev/null and b/Backend/src/routes/__pycache__/admin_privacy_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/audit_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/audit_routes.cpython-312.pyc new file mode 100644 index 00000000..c3d24857 Binary files /dev/null and b/Backend/src/routes/__pycache__/audit_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/auth_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/auth_routes.cpython-312.pyc index bf148a57..9d78f8d9 100644 Binary files a/Backend/src/routes/__pycache__/auth_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/auth_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/banner_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/banner_routes.cpython-312.pyc index 89ac3f6d..d4f80946 100644 Binary files a/Backend/src/routes/__pycache__/banner_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/banner_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/booking_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/booking_routes.cpython-312.pyc index 871d1bde..af97e9d9 100644 Binary files 
a/Backend/src/routes/__pycache__/booking_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/booking_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/payment_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/payment_routes.cpython-312.pyc index 3521603a..d73a5c9a 100644 Binary files a/Backend/src/routes/__pycache__/payment_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/payment_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/privacy_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/privacy_routes.cpython-312.pyc new file mode 100644 index 00000000..2a06bc84 Binary files /dev/null and b/Backend/src/routes/__pycache__/privacy_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/__pycache__/report_routes.cpython-312.pyc b/Backend/src/routes/__pycache__/report_routes.cpython-312.pyc index ad619686..77a874af 100644 Binary files a/Backend/src/routes/__pycache__/report_routes.cpython-312.pyc and b/Backend/src/routes/__pycache__/report_routes.cpython-312.pyc differ diff --git a/Backend/src/routes/admin_privacy_routes.py b/Backend/src/routes/admin_privacy_routes.py new file mode 100644 index 00000000..05da625d --- /dev/null +++ b/Backend/src/routes/admin_privacy_routes.py @@ -0,0 +1,120 @@ +from fastapi import APIRouter, Depends, status +from sqlalchemy.orm import Session + +from ..config.database import get_db +from ..middleware.auth import authorize_roles +from ..models.user import User +from ..schemas.admin_privacy import ( + CookieIntegrationSettings, + CookieIntegrationSettingsResponse, + CookiePolicySettings, + CookiePolicySettingsResponse, +) +from ..services.privacy_admin_service import privacy_admin_service + + +router = APIRouter(prefix="/admin/privacy", tags=["admin-privacy"]) + + +@router.get( + "/cookie-policy", + response_model=CookiePolicySettingsResponse, + status_code=status.HTTP_200_OK, +) +def get_cookie_policy( + db: Session = Depends(get_db), + _: User = Depends(authorize_roles("admin")), +) -> CookiePolicySettingsResponse: + """ + Get global cookie policy configuration (admin only). + """ + settings = privacy_admin_service.get_policy_settings(db) + policy = privacy_admin_service.get_or_create_policy(db) + updated_by_name = ( + policy.updated_by.full_name if getattr(policy, "updated_by", None) else None + ) + + return CookiePolicySettingsResponse( + data=settings, + updated_at=policy.updated_at, + updated_by=updated_by_name, + ) + + +@router.put( + "/cookie-policy", + response_model=CookiePolicySettingsResponse, + status_code=status.HTTP_200_OK, +) +def update_cookie_policy( + payload: CookiePolicySettings, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")), +) -> CookiePolicySettingsResponse: + """ + Update global cookie policy configuration (admin only). + """ + policy = privacy_admin_service.update_policy(db, payload, current_user) + settings = privacy_admin_service.get_policy_settings(db) + updated_by_name = ( + policy.updated_by.full_name if getattr(policy, "updated_by", None) else None + ) + + return CookiePolicySettingsResponse( + data=settings, + updated_at=policy.updated_at, + updated_by=updated_by_name, + ) + + +@router.get( + "/integrations", + response_model=CookieIntegrationSettingsResponse, + status_code=status.HTTP_200_OK, +) +def get_cookie_integrations( + db: Session = Depends(get_db), + _: User = Depends(authorize_roles("admin")), +) -> CookieIntegrationSettingsResponse: + """ + Get IDs for third-party integrations (admin only). 
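The admin privacy endpoints above accept and return the Pydantic models defined later in src/schemas/admin_privacy.py. A hedged example of driving them from a script follows; it assumes the router is mounted at the application root on port 8000 (adjust the base URL or any global path prefix to your deployment) and that an admin JWT has already been obtained from the auth routes.

# Illustrative only: the endpoint path and payload fields come from admin_privacy_routes.py
# and the admin_privacy schemas; the base URL and token handling are assumptions.
import httpx

ADMIN_TOKEN = "<admin JWT obtained via the auth routes>"  # placeholder
headers = {"Authorization": f"Bearer {ADMIN_TOKEN}"}

with httpx.Client(base_url="http://localhost:8000") as client:
    # Globally disable marketing cookies while leaving analytics and preferences enabled.
    resp = client.put(
        "/admin/privacy/cookie-policy",
        json={"analytics_enabled": True, "marketing_enabled": False, "preferences_enabled": True},
        headers=headers,
    )
    resp.raise_for_status()
    print(resp.json()["data"])        # the stored CookiePolicySettings
    print(resp.json()["updated_by"])  # full name of the admin who saved it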
+ """ + settings = privacy_admin_service.get_integration_settings(db) + cfg = privacy_admin_service.get_or_create_integrations(db) + updated_by_name = ( + cfg.updated_by.full_name if getattr(cfg, "updated_by", None) else None + ) + + return CookieIntegrationSettingsResponse( + data=settings, + updated_at=cfg.updated_at, + updated_by=updated_by_name, + ) + + +@router.put( + "/integrations", + response_model=CookieIntegrationSettingsResponse, + status_code=status.HTTP_200_OK, +) +def update_cookie_integrations( + payload: CookieIntegrationSettings, + db: Session = Depends(get_db), + current_user: User = Depends(authorize_roles("admin")), +) -> CookieIntegrationSettingsResponse: + """ + Update IDs for third-party integrations (admin only). + """ + cfg = privacy_admin_service.update_integrations(db, payload, current_user) + settings = privacy_admin_service.get_integration_settings(db) + updated_by_name = ( + cfg.updated_by.full_name if getattr(cfg, "updated_by", None) else None + ) + + return CookieIntegrationSettingsResponse( + data=settings, + updated_at=cfg.updated_at, + updated_by=updated_by_name, + ) + + diff --git a/Backend/src/routes/audit_routes.py b/Backend/src/routes/audit_routes.py new file mode 100644 index 00000000..dde2d513 --- /dev/null +++ b/Backend/src/routes/audit_routes.py @@ -0,0 +1,239 @@ +from fastapi import APIRouter, Depends, HTTPException, status, Query +from sqlalchemy.orm import Session +from sqlalchemy import desc, or_, func +from typing import Optional +from datetime import datetime + +from ..config.database import get_db +from ..middleware.auth import get_current_user, authorize_roles +from ..models.user import User +from ..models.audit_log import AuditLog + +router = APIRouter(prefix="/audit-logs", tags=["audit-logs"]) + + +@router.get("/") +async def get_audit_logs( + action: Optional[str] = Query(None, description="Filter by action"), + resource_type: Optional[str] = Query(None, description="Filter by resource type"), + user_id: Optional[int] = Query(None, description="Filter by user ID"), + status_filter: Optional[str] = Query(None, alias="status", description="Filter by status"), + search: Optional[str] = Query(None, description="Search in action, resource_type, or details"), + start_date: Optional[str] = Query(None, description="Start date (YYYY-MM-DD)"), + end_date: Optional[str] = Query(None, description="End date (YYYY-MM-DD)"), + page: int = Query(1, ge=1, description="Page number"), + limit: int = Query(20, ge=1, le=100, description="Items per page"), + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Get audit logs (Admin only)""" + try: + query = db.query(AuditLog) + + # Apply filters + if action: + query = query.filter(AuditLog.action.like(f"%{action}%")) + + if resource_type: + query = query.filter(AuditLog.resource_type == resource_type) + + if user_id: + query = query.filter(AuditLog.user_id == user_id) + + if status_filter: + query = query.filter(AuditLog.status == status_filter) + + if search: + search_filter = or_( + AuditLog.action.like(f"%{search}%"), + AuditLog.resource_type.like(f"%{search}%"), + AuditLog.ip_address.like(f"%{search}%") + ) + query = query.filter(search_filter) + + # Date range filter + if start_date: + try: + start = datetime.strptime(start_date, "%Y-%m-%d") + query = query.filter(AuditLog.created_at >= start) + except ValueError: + pass + + if end_date: + try: + end = datetime.strptime(end_date, "%Y-%m-%d") + # Set to end of day + end = end.replace(hour=23, minute=59, 
second=59) + query = query.filter(AuditLog.created_at <= end) + except ValueError: + pass + + # Get total count + total = query.count() + + # Apply pagination and ordering + offset = (page - 1) * limit + logs = query.order_by(desc(AuditLog.created_at)).offset(offset).limit(limit).all() + + # Format response + result = [] + for log in logs: + log_dict = { + "id": log.id, + "user_id": log.user_id, + "action": log.action, + "resource_type": log.resource_type, + "resource_id": log.resource_id, + "ip_address": log.ip_address, + "user_agent": log.user_agent, + "request_id": log.request_id, + "details": log.details, + "status": log.status, + "error_message": log.error_message, + "created_at": log.created_at.isoformat() if log.created_at else None, + } + + # Add user info if available + if log.user: + log_dict["user"] = { + "id": log.user.id, + "full_name": log.user.full_name, + "email": log.user.email, + } + + result.append(log_dict) + + return { + "status": "success", + "data": { + "logs": result, + "pagination": { + "total": total, + "page": page, + "limit": limit, + "totalPages": (total + limit - 1) // limit, + }, + }, + } + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/stats") +async def get_audit_stats( + start_date: Optional[str] = Query(None, description="Start date (YYYY-MM-DD)"), + end_date: Optional[str] = Query(None, description="End date (YYYY-MM-DD)"), + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Get audit log statistics (Admin only)""" + try: + query = db.query(AuditLog) + + # Date range filter + if start_date: + try: + start = datetime.strptime(start_date, "%Y-%m-%d") + query = query.filter(AuditLog.created_at >= start) + except ValueError: + pass + + if end_date: + try: + end = datetime.strptime(end_date, "%Y-%m-%d") + end = end.replace(hour=23, minute=59, second=59) + query = query.filter(AuditLog.created_at <= end) + except ValueError: + pass + + # Get statistics + total_logs = query.count() + success_count = query.filter(AuditLog.status == "success").count() + failed_count = query.filter(AuditLog.status == "failed").count() + error_count = query.filter(AuditLog.status == "error").count() + + # Get top actions + top_actions = ( + db.query( + AuditLog.action, + func.count(AuditLog.id).label("count") + ) + .group_by(AuditLog.action) + .order_by(desc("count")) + .limit(10) + .all() + ) + + # Get top resource types + top_resource_types = ( + db.query( + AuditLog.resource_type, + func.count(AuditLog.id).label("count") + ) + .group_by(AuditLog.resource_type) + .order_by(desc("count")) + .limit(10) + .all() + ) + + return { + "status": "success", + "data": { + "total": total_logs, + "by_status": { + "success": success_count, + "failed": failed_count, + "error": error_count, + }, + "top_actions": [{"action": action, "count": count} for action, count in top_actions], + "top_resource_types": [{"resource_type": rt, "count": count} for rt, count in top_resource_types], + }, + } + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/{id}") +async def get_audit_log_by_id( + id: int, + current_user: User = Depends(authorize_roles("admin")), + db: Session = Depends(get_db) +): + """Get audit log by ID (Admin only)""" + try: + log = db.query(AuditLog).filter(AuditLog.id == id).first() + + if not log: + raise HTTPException(status_code=404, detail="Audit log not found") + + log_dict = { + "id": log.id, + "user_id": log.user_id, + "action": log.action, + 
"resource_type": log.resource_type, + "resource_id": log.resource_id, + "ip_address": log.ip_address, + "user_agent": log.user_agent, + "request_id": log.request_id, + "details": log.details, + "status": log.status, + "error_message": log.error_message, + "created_at": log.created_at.isoformat() if log.created_at else None, + } + + if log.user: + log_dict["user"] = { + "id": log.user.id, + "full_name": log.user.full_name, + "email": log.user.email, + } + + return { + "status": "success", + "data": {"log": log_dict} + } + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/routes/auth_routes.py b/Backend/src/routes/auth_routes.py index da2a3d5f..a8f365ea 100644 --- a/Backend/src/routes/auth_routes.py +++ b/Backend/src/routes/auth_routes.py @@ -163,7 +163,12 @@ async def get_profile( """Get current user profile""" try: user = await auth_service.get_profile(db, current_user.id) - return user + return { + "status": "success", + "data": { + "user": user + } + } except ValueError as e: if "User not found" in str(e): raise HTTPException( @@ -176,6 +181,46 @@ async def get_profile( ) +@router.put("/profile") +async def update_profile( + profile_data: dict, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Update current user profile""" + try: + user = await auth_service.update_profile( + db=db, + user_id=current_user.id, + full_name=profile_data.get("full_name"), + email=profile_data.get("email"), + phone_number=profile_data.get("phone_number"), + password=profile_data.get("password"), + current_password=profile_data.get("currentPassword") + ) + return { + "status": "success", + "message": "Profile updated successfully", + "data": { + "user": user + } + } + except ValueError as e: + error_message = str(e) + status_code = status.HTTP_400_BAD_REQUEST + if "not found" in error_message.lower(): + status_code = status.HTTP_404_NOT_FOUND + raise HTTPException( + status_code=status_code, + detail=error_message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"An error occurred: {str(e)}" + ) + + @router.post("/forgot-password", response_model=MessageResponse) async def forgot_password( request: ForgotPasswordRequest, diff --git a/Backend/src/routes/banner_routes.py b/Backend/src/routes/banner_routes.py index a6061a3a..3328f36a 100644 --- a/Backend/src/routes/banner_routes.py +++ b/Backend/src/routes/banner_routes.py @@ -1,9 +1,12 @@ -from fastapi import APIRouter, Depends, HTTPException, status, Query, Request +from fastapi import APIRouter, Depends, HTTPException, status, Query, Request, UploadFile, File from sqlalchemy.orm import Session from sqlalchemy import and_, or_ from typing import Optional from datetime import datetime +from pathlib import Path import os +import aiofiles +import uuid from ..config.database import get_db from ..middleware.auth import get_current_user, authorize_roles @@ -215,6 +218,12 @@ async def delete_banner( if not banner: raise HTTPException(status_code=404, detail="Banner not found") + # Delete image file if it exists and is a local upload + if banner.image_url and banner.image_url.startswith('/uploads/banners/'): + file_path = Path(__file__).parent.parent.parent / "uploads" / "banners" / Path(banner.image_url).name + if file_path.exists(): + file_path.unlink() + db.delete(banner) db.commit() @@ -227,3 +236,51 @@ async def delete_banner( except Exception as e: db.rollback() raise 
HTTPException(status_code=500, detail=str(e)) + + +@router.post("/upload", dependencies=[Depends(authorize_roles("admin"))]) +async def upload_banner_image( + request: Request, + image: UploadFile = File(...), + current_user: User = Depends(authorize_roles("admin")), +): + """Upload banner image (Admin only)""" + try: + # Validate file type + if not image.content_type or not image.content_type.startswith('image/'): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="File must be an image" + ) + + # Create uploads directory + upload_dir = Path(__file__).parent.parent.parent / "uploads" / "banners" + upload_dir.mkdir(parents=True, exist_ok=True) + + # Generate filename + ext = Path(image.filename).suffix + filename = f"banner-{uuid.uuid4()}{ext}" + file_path = upload_dir / filename + + # Save file + async with aiofiles.open(file_path, 'wb') as f: + content = await image.read() + await f.write(content) + + # Return the image URL + image_url = f"/uploads/banners/{filename}" + base_url = get_base_url(request) + full_url = normalize_image_url(image_url, base_url) + + return { + "status": "success", + "message": "Image uploaded successfully", + "data": { + "image_url": image_url, + "full_url": full_url + } + } + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) diff --git a/Backend/src/routes/booking_routes.py b/Backend/src/routes/booking_routes.py index f370d089..37a88312 100644 --- a/Backend/src/routes/booking_routes.py +++ b/Backend/src/routes/booking_routes.py @@ -4,14 +4,21 @@ from sqlalchemy import and_, or_ from typing import Optional from datetime import datetime import random +import os from ..config.database import get_db +from ..config.settings import settings from ..middleware.auth import get_current_user, authorize_roles from ..models.user import User from ..models.booking import Booking, BookingStatus from ..models.room import Room from ..models.room_type import RoomType from ..models.payment import Payment, PaymentMethod, PaymentType, PaymentStatus +from ..utils.mailer import send_email +from ..utils.email_templates import ( + booking_confirmation_email_template, + booking_status_changed_email_template +) router = APIRouter(prefix="/bookings", tags=["bookings"]) @@ -255,6 +262,33 @@ async def create_booking( # Fetch with relations booking = db.query(Booking).filter(Booking.id == booking.id).first() + # Send booking confirmation email (non-blocking) + try: + client_url = settings.CLIENT_URL or os.getenv("CLIENT_URL", "http://localhost:5173") + room = db.query(Room).filter(Room.id == room_id).first() + room_type_name = room.room_type.name if room and room.room_type else "Room" + + email_html = booking_confirmation_email_template( + booking_number=booking.booking_number, + guest_name=current_user.full_name, + room_number=room.room_number if room else "N/A", + room_type=room_type_name, + check_in=check_in.strftime("%B %d, %Y"), + check_out=check_out.strftime("%B %d, %Y"), + num_guests=guest_count, + total_price=float(total_price), + requires_deposit=requires_deposit, + deposit_amount=deposit_amount if requires_deposit else None, + client_url=client_url + ) + await send_email( + to=current_user.email, + subject=f"Booking Confirmation - {booking.booking_number}", + html=email_html + ) + except Exception as e: + print(f"Failed to send booking confirmation email: {e}") + return { "success": True, "data": {"booking": booking}, @@ -354,6 +388,23 @@ async def cancel_booking( booking.status = 
BookingStatus.cancelled db.commit() + # Send cancellation email (non-blocking) + try: + client_url = settings.CLIENT_URL or os.getenv("CLIENT_URL", "http://localhost:5173") + email_html = booking_status_changed_email_template( + booking_number=booking.booking_number, + guest_name=booking.user.full_name if booking.user else "Guest", + status="cancelled", + client_url=client_url + ) + await send_email( + to=booking.user.email if booking.user else None, + subject=f"Booking Cancelled - {booking.booking_number}", + html=email_html + ) + except Exception as e: + print(f"Failed to send cancellation email: {e}") + return { "success": True, "data": {"booking": booking} @@ -378,6 +429,7 @@ async def update_booking( if not booking: raise HTTPException(status_code=404, detail="Booking not found") + old_status = booking.status status_value = booking_data.get("status") if status_value: try: @@ -388,6 +440,24 @@ async def update_booking( db.commit() db.refresh(booking) + # Send status change email if status changed (non-blocking) + if status_value and old_status != booking.status: + try: + client_url = settings.CLIENT_URL or os.getenv("CLIENT_URL", "http://localhost:5173") + email_html = booking_status_changed_email_template( + booking_number=booking.booking_number, + guest_name=booking.user.full_name if booking.user else "Guest", + status=booking.status.value, + client_url=client_url + ) + await send_email( + to=booking.user.email if booking.user else None, + subject=f"Booking Status Updated - {booking.booking_number}", + html=email_html + ) + except Exception as e: + print(f"Failed to send status change email: {e}") + return { "status": "success", "message": "Booking updated successfully", diff --git a/Backend/src/routes/payment_routes.py b/Backend/src/routes/payment_routes.py index c72a8392..554fc8e6 100644 --- a/Backend/src/routes/payment_routes.py +++ b/Backend/src/routes/payment_routes.py @@ -2,12 +2,16 @@ from fastapi import APIRouter, Depends, HTTPException, status, Query from sqlalchemy.orm import Session from typing import Optional from datetime import datetime +import os from ..config.database import get_db +from ..config.settings import settings from ..middleware.auth import get_current_user, authorize_roles from ..models.user import User from ..models.payment import Payment, PaymentMethod, PaymentType, PaymentStatus -from ..models.booking import Booking +from ..models.booking import Booking, BookingStatus +from ..utils.mailer import send_email +from ..utils.email_templates import payment_confirmation_email_template router = APIRouter(prefix="/payments", tags=["payments"]) @@ -85,6 +89,63 @@ async def get_payments( raise HTTPException(status_code=500, detail=str(e)) +@router.get("/booking/{booking_id}") +async def get_payments_by_booking_id( + booking_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Get all payments for a specific booking""" + try: + # Check if booking exists and user has access + booking = db.query(Booking).filter(Booking.id == booking_id).first() + if not booking: + raise HTTPException(status_code=404, detail="Booking not found") + + # Check access - users can only see their own bookings unless admin + if current_user.role_id != 1 and booking.user_id != current_user.id: + raise HTTPException(status_code=403, detail="Forbidden") + + # Get all payments for this booking + payments = db.query(Payment).filter(Payment.booking_id == booking_id).order_by(Payment.created_at.desc()).all() + + result = [] + for payment in payments: + 
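With the per-booking payments endpoint added in payment_routes.py, a client can fetch a booking's payment history in one call. A sketch, again assuming the API runs on localhost:8000 with no extra path prefix and that the caller is either the booking's owner or an admin; the booking id and token are placeholders.

# Illustrative call to the new GET /payments/booking/{booking_id} endpoint.
import httpx

USER_TOKEN = "<JWT of the booking owner or an admin>"  # placeholder

with httpx.Client(base_url="http://localhost:8000") as client:
    resp = client.get(
        "/payments/booking/42",  # 42 is an example booking id
        headers={"Authorization": f"Bearer {USER_TOKEN}"},
    )
    resp.raise_for_status()
    for payment in resp.json()["data"]["payments"]:
        print(payment["payment_type"], payment["amount"], payment["payment_status"])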
payment_dict = { + "id": payment.id, + "booking_id": payment.booking_id, + "amount": float(payment.amount) if payment.amount else 0.0, + "payment_method": payment.payment_method.value if isinstance(payment.payment_method, PaymentMethod) else payment.payment_method, + "payment_type": payment.payment_type.value if isinstance(payment.payment_type, PaymentType) else payment.payment_type, + "deposit_percentage": payment.deposit_percentage, + "related_payment_id": payment.related_payment_id, + "payment_status": payment.payment_status.value if isinstance(payment.payment_status, PaymentStatus) else payment.payment_status, + "transaction_id": payment.transaction_id, + "payment_date": payment.payment_date.isoformat() if payment.payment_date else None, + "notes": payment.notes, + "created_at": payment.created_at.isoformat() if payment.created_at else None, + } + + if payment.booking: + payment_dict["booking"] = { + "id": payment.booking.id, + "booking_number": payment.booking.booking_number, + } + + result.append(payment_dict) + + return { + "status": "success", + "data": { + "payments": result + } + } + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + @router.get("/{id}") async def get_payment_by_id( id: int, @@ -169,11 +230,32 @@ async def create_payment( # If marked as paid, update status if payment_data.get("mark_as_paid"): payment.payment_status = PaymentStatus.completed + payment.payment_date = datetime.utcnow() db.add(payment) db.commit() db.refresh(payment) + # Send payment confirmation email if payment was marked as paid (non-blocking) + if payment.payment_status == PaymentStatus.completed and booking.user: + try: + client_url = settings.CLIENT_URL or os.getenv("CLIENT_URL", "http://localhost:5173") + email_html = payment_confirmation_email_template( + booking_number=booking.booking_number, + guest_name=booking.user.full_name, + amount=float(payment.amount), + payment_method=payment.payment_method.value if isinstance(payment.payment_method, PaymentMethod) else str(payment.payment_method), + transaction_id=payment.transaction_id, + client_url=client_url + ) + await send_email( + to=booking.user.email, + subject=f"Payment Confirmed - {booking.booking_number}", + html=email_html + ) + except Exception as e: + print(f"Failed to send payment confirmation email: {e}") + return { "status": "success", "message": "Payment created successfully", @@ -209,6 +291,7 @@ async def update_payment_status( if status_data.get("transaction_id"): payment.transaction_id = status_data["transaction_id"] + old_status = payment.payment_status if status_data.get("mark_as_paid"): payment.payment_status = PaymentStatus.completed payment.payment_date = datetime.utcnow() @@ -216,6 +299,37 @@ async def update_payment_status( db.commit() db.refresh(payment) + # Send payment confirmation email if payment was just completed (non-blocking) + if payment.payment_status == PaymentStatus.completed and old_status != PaymentStatus.completed: + try: + # Refresh booking relationship + payment = db.query(Payment).filter(Payment.id == id).first() + if payment.booking and payment.booking.user: + client_url = settings.CLIENT_URL or os.getenv("CLIENT_URL", "http://localhost:5173") + email_html = payment_confirmation_email_template( + booking_number=payment.booking.booking_number, + guest_name=payment.booking.user.full_name, + amount=float(payment.amount), + payment_method=payment.payment_method.value if isinstance(payment.payment_method, PaymentMethod) else 
str(payment.payment_method), + transaction_id=payment.transaction_id, + client_url=client_url + ) + await send_email( + to=payment.booking.user.email, + subject=f"Payment Confirmed - {payment.booking.booking_number}", + html=email_html + ) + + # If this is a deposit payment, update booking deposit_paid status + if payment.payment_type == PaymentType.deposit and payment.booking: + payment.booking.deposit_paid = True + # Optionally auto-confirm booking if deposit is paid + if payment.booking.status == BookingStatus.pending: + payment.booking.status = BookingStatus.confirmed + db.commit() + except Exception as e: + print(f"Failed to send payment confirmation email: {e}") + return { "status": "success", "message": "Payment status updated successfully", diff --git a/Backend/src/routes/privacy_routes.py b/Backend/src/routes/privacy_routes.py new file mode 100644 index 00000000..57f5233c --- /dev/null +++ b/Backend/src/routes/privacy_routes.py @@ -0,0 +1,111 @@ +from fastapi import APIRouter, Depends, Request, Response, status +from sqlalchemy.orm import Session + +from ..config.database import get_db +from ..config.logging_config import get_logger +from ..config.settings import settings +from ..middleware.cookie_consent import COOKIE_CONSENT_COOKIE_NAME, _parse_consent_cookie +from ..schemas.admin_privacy import PublicPrivacyConfigResponse +from ..schemas.privacy import ( + CookieCategoryPreferences, + CookieConsent, + CookieConsentResponse, + UpdateCookieConsentRequest, +) +from ..services.privacy_admin_service import privacy_admin_service + + +logger = get_logger(__name__) + +router = APIRouter(prefix="/privacy", tags=["privacy"]) + + +@router.get( + "/cookie-consent", + response_model=CookieConsentResponse, + status_code=status.HTTP_200_OK, +) +async def get_cookie_consent(request: Request) -> CookieConsentResponse: + """ + Return the current cookie consent preferences. + Reads from the cookie (if present) or returns default (necessary only). + """ + raw_cookie = request.cookies.get(COOKIE_CONSENT_COOKIE_NAME) + consent = _parse_consent_cookie(raw_cookie) + + # Ensure necessary is always true + consent.categories.necessary = True + + return CookieConsentResponse(data=consent) + + +@router.post( + "/cookie-consent", + response_model=CookieConsentResponse, + status_code=status.HTTP_200_OK, +) +async def update_cookie_consent( + request: UpdateCookieConsentRequest, response: Response +) -> CookieConsentResponse: + """ + Update cookie consent preferences. + + The 'necessary' category is controlled by the server and always true. + """ + # Build categories from existing cookie (if any) so partial updates work + existing_raw = response.headers.get("cookie") # usually empty here + # We can't reliably read cookies from the response; rely on defaults. + # For the purposes of this API, we always start from defaults and then + # override with the request payload. 
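Downstream code does not need to re-parse the consent cookie: CookieConsentMiddleware has already attached the parsed CookieConsent to request.state, and the helper functions in cookie_consent.py wrap the common checks. A minimal sketch of a consumer, assuming it lives under src/routes; the /analytics/events route itself is hypothetical.

# Hypothetical route that respects the analytics consent parsed by CookieConsentMiddleware.
from fastapi import APIRouter, Request

from ..middleware.cookie_consent import is_analytics_allowed

router = APIRouter(prefix="/analytics", tags=["analytics"])


@router.post("/events")
async def track_event(request: Request, payload: dict):
    # request.state.cookie_consent is set by the middleware for every request;
    # is_analytics_allowed() returns False when the user has not opted in.
    if not is_analytics_allowed(request):
        return {"status": "skipped", "reason": "analytics consent not granted"}
    # ... persist or forward the event here ...
    return {"status": "success", "received": len(payload)}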
+ categories = CookieCategoryPreferences() + + if request.analytics is not None: + categories.analytics = request.analytics + if request.marketing is not None: + categories.marketing = request.marketing + if request.preferences is not None: + categories.preferences = request.preferences + + # 'necessary' enforced server-side + categories.necessary = True + + consent = CookieConsent(categories=categories, has_decided=True) + + # Persist consent as a secure, HttpOnly cookie + response.set_cookie( + key=COOKIE_CONSENT_COOKIE_NAME, + value=consent.model_dump_json(), + httponly=True, + secure=settings.is_production, + samesite="lax", + max_age=365 * 24 * 60 * 60, # 1 year + path="/", + ) + + logger.info( + "Cookie consent updated: analytics=%s, marketing=%s, preferences=%s", + consent.categories.analytics, + consent.categories.marketing, + consent.categories.preferences, + ) + + return CookieConsentResponse(data=consent) + + +@router.get( + "/config", + response_model=PublicPrivacyConfigResponse, + status_code=status.HTTP_200_OK, +) +async def get_public_privacy_config( + db: Session = Depends(get_db), +) -> PublicPrivacyConfigResponse: + """ + Public privacy configuration for the frontend: + - Global policy flags + - Public integration IDs (e.g. GA measurement ID) + """ + config = privacy_admin_service.get_public_privacy_config(db) + return PublicPrivacyConfigResponse(data=config) + + diff --git a/Backend/src/routes/report_routes.py b/Backend/src/routes/report_routes.py index b0b75843..bfebd8b3 100644 --- a/Backend/src/routes/report_routes.py +++ b/Backend/src/routes/report_routes.py @@ -10,6 +10,8 @@ from ..models.user import User from ..models.booking import Booking, BookingStatus from ..models.payment import Payment, PaymentStatus from ..models.room import Room +from ..models.service_usage import ServiceUsage +from ..models.service import Service router = APIRouter(prefix="/reports", tags=["reports"]) @@ -140,6 +142,33 @@ async def get_reports( for room_id, room_number, bookings, revenue in top_rooms_data ] + # Service usage statistics + service_usage_query = db.query( + Service.id, + Service.name, + func.count(ServiceUsage.id).label('usage_count'), + func.sum(ServiceUsage.total_price).label('total_revenue') + ).join(ServiceUsage, Service.id == ServiceUsage.service_id) + + if start_date: + service_usage_query = service_usage_query.filter(ServiceUsage.usage_date >= start_date) + if end_date: + service_usage_query = service_usage_query.filter(ServiceUsage.usage_date <= end_date) + + service_usage_data = service_usage_query.group_by(Service.id, Service.name).order_by( + func.sum(ServiceUsage.total_price).desc() + ).limit(10).all() + + service_usage = [ + { + "service_id": service_id, + "service_name": service_name, + "usage_count": int(usage_count or 0), + "total_revenue": float(total_revenue or 0) + } + for service_id, service_name, usage_count, total_revenue in service_usage_data + ] + return { "status": "success", "success": True, @@ -152,6 +181,7 @@ async def get_reports( "revenue_by_date": revenue_by_date if revenue_by_date else None, "bookings_by_status": bookings_by_status, "top_rooms": top_rooms if top_rooms else None, + "service_usage": service_usage if service_usage else None, } } except Exception as e: @@ -221,6 +251,171 @@ async def get_dashboard_stats( raise HTTPException(status_code=500, detail=str(e)) +@router.get("/customer/dashboard") +async def get_customer_dashboard_stats( + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Get customer 
dashboard statistics""" + try: + from datetime import datetime, timedelta + + # Total bookings count for user + total_bookings = db.query(Booking).filter( + Booking.user_id == current_user.id + ).count() + + # Total spending (sum of completed payments from user's bookings) + user_bookings = db.query(Booking.id).filter( + Booking.user_id == current_user.id + ).subquery() + + total_spending = db.query(func.sum(Payment.amount)).filter( + and_( + Payment.booking_id.in_(db.query(user_bookings.c.id)), + Payment.payment_status == PaymentStatus.completed + ) + ).scalar() or 0.0 + + # Currently staying (checked_in bookings) + now = datetime.utcnow() + currently_staying = db.query(Booking).filter( + and_( + Booking.user_id == current_user.id, + Booking.status == BookingStatus.checked_in, + Booking.check_in_date <= now, + Booking.check_out_date >= now + ) + ).count() + + # Upcoming bookings (confirmed/pending with check_in_date in future) + upcoming_bookings_query = db.query(Booking).filter( + and_( + Booking.user_id == current_user.id, + Booking.status.in_([BookingStatus.confirmed, BookingStatus.pending]), + Booking.check_in_date > now + ) + ).order_by(Booking.check_in_date.asc()).limit(5).all() + + upcoming_bookings = [] + for booking in upcoming_bookings_query: + booking_dict = { + "id": booking.id, + "booking_number": booking.booking_number, + "check_in_date": booking.check_in_date.isoformat() if booking.check_in_date else None, + "check_out_date": booking.check_out_date.isoformat() if booking.check_out_date else None, + "status": booking.status.value if isinstance(booking.status, BookingStatus) else booking.status, + "total_price": float(booking.total_price) if booking.total_price else 0.0, + } + + if booking.room: + booking_dict["room"] = { + "id": booking.room.id, + "room_number": booking.room.room_number, + "room_type": { + "name": booking.room.room_type.name if booking.room.room_type else None + } + } + + upcoming_bookings.append(booking_dict) + + # Recent activity (last 5 bookings ordered by created_at) + recent_bookings_query = db.query(Booking).filter( + Booking.user_id == current_user.id + ).order_by(Booking.created_at.desc()).limit(5).all() + + recent_activity = [] + for booking in recent_bookings_query: + activity_type = None + if booking.status == BookingStatus.checked_out: + activity_type = "Check-out" + elif booking.status == BookingStatus.checked_in: + activity_type = "Check-in" + elif booking.status == BookingStatus.confirmed: + activity_type = "Booking Confirmed" + elif booking.status == BookingStatus.pending: + activity_type = "Booking" + else: + activity_type = "Booking" + + activity_dict = { + "action": activity_type, + "booking_id": booking.id, + "booking_number": booking.booking_number, + "created_at": booking.created_at.isoformat() if booking.created_at else None, + } + + if booking.room: + activity_dict["room"] = { + "room_number": booking.room.room_number, + } + + recent_activity.append(activity_dict) + + # Calculate percentage change (placeholder - can be enhanced) + # For now, compare last month vs this month + last_month_start = (now - timedelta(days=30)).replace(day=1, hour=0, minute=0, second=0) + last_month_end = now.replace(day=1, hour=0, minute=0, second=0) - timedelta(seconds=1) + + last_month_bookings = db.query(Booking).filter( + and_( + Booking.user_id == current_user.id, + Booking.created_at >= last_month_start, + Booking.created_at <= last_month_end + ) + ).count() + + this_month_bookings = db.query(Booking).filter( + and_( + Booking.user_id == 
current_user.id, + Booking.created_at >= now.replace(day=1, hour=0, minute=0, second=0), + Booking.created_at <= now + ) + ).count() + + booking_change_percentage = 0 + if last_month_bookings > 0: + booking_change_percentage = ((this_month_bookings - last_month_bookings) / last_month_bookings) * 100 + + last_month_spending = db.query(func.sum(Payment.amount)).filter( + and_( + Payment.booking_id.in_(db.query(user_bookings.c.id)), + Payment.payment_status == PaymentStatus.completed, + Payment.payment_date >= last_month_start, + Payment.payment_date <= last_month_end + ) + ).scalar() or 0.0 + + this_month_spending = db.query(func.sum(Payment.amount)).filter( + and_( + Payment.booking_id.in_(db.query(user_bookings.c.id)), + Payment.payment_status == PaymentStatus.completed, + Payment.payment_date >= now.replace(day=1, hour=0, minute=0, second=0), + Payment.payment_date <= now + ) + ).scalar() or 0.0 + + spending_change_percentage = 0 + if last_month_spending > 0: + spending_change_percentage = ((this_month_spending - last_month_spending) / last_month_spending) * 100 + + return { + "status": "success", + "success": True, + "data": { + "total_bookings": total_bookings, + "total_spending": float(total_spending), + "currently_staying": currently_staying, + "upcoming_bookings": upcoming_bookings, + "recent_activity": recent_activity, + "booking_change_percentage": round(booking_change_percentage, 1), + "spending_change_percentage": round(spending_change_percentage, 1), + } + } + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + @router.get("/revenue") async def get_revenue_report( start_date: Optional[str] = Query(None), diff --git a/Backend/src/schemas/__pycache__/admin_privacy.cpython-312.pyc b/Backend/src/schemas/__pycache__/admin_privacy.cpython-312.pyc new file mode 100644 index 00000000..2b5e98f9 Binary files /dev/null and b/Backend/src/schemas/__pycache__/admin_privacy.cpython-312.pyc differ diff --git a/Backend/src/schemas/__pycache__/privacy.cpython-312.pyc b/Backend/src/schemas/__pycache__/privacy.cpython-312.pyc new file mode 100644 index 00000000..df21ff7c Binary files /dev/null and b/Backend/src/schemas/__pycache__/privacy.cpython-312.pyc differ diff --git a/Backend/src/schemas/admin_privacy.py b/Backend/src/schemas/admin_privacy.py new file mode 100644 index 00000000..7000a19b --- /dev/null +++ b/Backend/src/schemas/admin_privacy.py @@ -0,0 +1,68 @@ +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, Field + + +class CookiePolicySettings(BaseModel): + """ + Admin-configurable global cookie policy. + Controls which categories can be used in the application. + """ + + analytics_enabled: bool = Field( + default=True, + description="If false, analytics cookies/scripts should not be used at all.", + ) + marketing_enabled: bool = Field( + default=True, + description="If false, marketing cookies/scripts should not be used at all.", + ) + preferences_enabled: bool = Field( + default=True, + description="If false, preference cookies should not be used at all.", + ) + + +class CookiePolicySettingsResponse(BaseModel): + status: str = Field(default="success") + data: CookiePolicySettings + updated_at: Optional[datetime] = None + updated_by: Optional[str] = None + + +class CookieIntegrationSettings(BaseModel): + """ + IDs for well-known third-party integrations, configured by admin. + """ + + ga_measurement_id: Optional[str] = Field( + default=None, description="Google Analytics 4 measurement ID (e.g. G-XXXXXXX)." 
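These schemas are what GET /privacy/config (defined in privacy_routes.py above) serializes for the frontend. For orientation, a response would look roughly like the following; the concrete values are invented.

# Approximate shape of a GET /privacy/config response (values are illustrative).
example_public_privacy_config = {
    "status": "success",
    "data": {
        "policy": {
            "analytics_enabled": True,
            "marketing_enabled": False,
            "preferences_enabled": True,
        },
        "integrations": {
            "ga_measurement_id": "G-XXXXXXXXXX",  # as configured by an admin, or None
            "fb_pixel_id": None,
        },
    },
}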
+ ) + fb_pixel_id: Optional[str] = Field( + default=None, description="Meta (Facebook) Pixel ID." + ) + + +class CookieIntegrationSettingsResponse(BaseModel): + status: str = Field(default="success") + data: CookieIntegrationSettings + updated_at: Optional[datetime] = None + updated_by: Optional[str] = None + + +class PublicPrivacyConfig(BaseModel): + """ + Publicly consumable privacy configuration for the frontend. + Does not expose any secrets, only IDs and flags. + """ + + policy: CookiePolicySettings + integrations: CookieIntegrationSettings + + +class PublicPrivacyConfigResponse(BaseModel): + status: str = Field(default="success") + data: PublicPrivacyConfig + + diff --git a/Backend/src/schemas/privacy.py b/Backend/src/schemas/privacy.py new file mode 100644 index 00000000..b03d35d9 --- /dev/null +++ b/Backend/src/schemas/privacy.py @@ -0,0 +1,70 @@ +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, Field + + +class CookieCategoryPreferences(BaseModel): + """ + Granular consent for different cookie categories. + + - necessary: required for the site to function (always true, not revocable) + - analytics: usage analytics, performance tracking + - marketing: advertising, remarketing cookies + - preferences: UI / language / personalization preferences + """ + + necessary: bool = Field( + default=True, + description="Strictly necessary cookies (always enabled as they are required for core functionality).", + ) + analytics: bool = Field( + default=False, description="Allow anonymous analytics and performance cookies." + ) + marketing: bool = Field( + default=False, description="Allow marketing and advertising cookies." + ) + preferences: bool = Field( + default=False, + description="Allow preference cookies (e.g. language, layout settings).", + ) + + +class CookieConsent(BaseModel): + """ + Persisted cookie consent state. + Stored in an HttpOnly cookie and exposed via the API. + """ + + version: int = Field( + default=1, description="Consent schema version for future migrations." + ) + updated_at: datetime = Field( + default_factory=datetime.utcnow, description="Last time consent was updated." + ) + has_decided: bool = Field( + default=False, + description="Whether the user has actively made a consent choice.", + ) + categories: CookieCategoryPreferences = Field( + default_factory=CookieCategoryPreferences, + description="Granular per-category consent.", + ) + + +class CookieConsentResponse(BaseModel): + status: str = Field(default="success") + data: CookieConsent + + +class UpdateCookieConsentRequest(BaseModel): + """ + Request body for updating cookie consent. + 'necessary' is ignored on write and always treated as True by the server. 
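Tying the consent schema to the endpoint in privacy_routes.py: the request body carries only the optional category flags, and the server both forces necessary to True and writes the HttpOnly cookieConsent cookie. A sketch of a client call, with the same base-URL assumption as the earlier examples; this route requires no authentication.

# Illustrative call to POST /privacy/cookie-consent.
import httpx

with httpx.Client(base_url="http://localhost:8000") as client:
    resp = client.post(
        "/privacy/cookie-consent",
        json={"analytics": True, "marketing": False, "preferences": True},
    )
    resp.raise_for_status()
    consent = resp.json()["data"]
    assert consent["categories"]["necessary"] is True  # enforced server-side
    assert "cookieConsent" in resp.cookies              # consent cookie set by the server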
+ """ + + analytics: Optional[bool] = None + marketing: Optional[bool] = None + preferences: Optional[bool] = None + + diff --git a/Backend/src/services/__pycache__/auth_service.cpython-312.pyc b/Backend/src/services/__pycache__/auth_service.cpython-312.pyc index d50e93b7..d3c01b91 100644 Binary files a/Backend/src/services/__pycache__/auth_service.cpython-312.pyc and b/Backend/src/services/__pycache__/auth_service.cpython-312.pyc differ diff --git a/Backend/src/services/__pycache__/privacy_admin_service.cpython-312.pyc b/Backend/src/services/__pycache__/privacy_admin_service.cpython-312.pyc new file mode 100644 index 00000000..75fd2ed3 Binary files /dev/null and b/Backend/src/services/__pycache__/privacy_admin_service.cpython-312.pyc differ diff --git a/Backend/src/services/audit_service.py b/Backend/src/services/audit_service.py new file mode 100644 index 00000000..98d52b2b --- /dev/null +++ b/Backend/src/services/audit_service.py @@ -0,0 +1,82 @@ +""" +Audit logging service for tracking important actions +""" +from sqlalchemy.orm import Session +from typing import Optional, Dict, Any +from datetime import datetime +from ..models.audit_log import AuditLog +from ..config.logging_config import get_logger + +logger = get_logger(__name__) + + +class AuditService: + """Service for creating audit log entries""" + + @staticmethod + async def log_action( + db: Session, + action: str, + resource_type: str, + user_id: Optional[int] = None, + resource_id: Optional[int] = None, + ip_address: Optional[str] = None, + user_agent: Optional[str] = None, + request_id: Optional[str] = None, + details: Optional[Dict[str, Any]] = None, + status: str = "success", + error_message: Optional[str] = None + ): + """ + Create an audit log entry + + Args: + db: Database session + action: Action performed (e.g., "user.created", "booking.cancelled") + resource_type: Type of resource (e.g., "user", "booking") + user_id: ID of user who performed the action + resource_id: ID of the resource affected + ip_address: IP address of the request + user_agent: User agent string + request_id: Request ID for tracing + details: Additional context as dictionary + status: Status of the action (success, failed, error) + error_message: Error message if action failed + """ + try: + audit_log = AuditLog( + user_id=user_id, + action=action, + resource_type=resource_type, + resource_id=resource_id, + ip_address=ip_address, + user_agent=user_agent, + request_id=request_id, + details=details, + status=status, + error_message=error_message + ) + + db.add(audit_log) + db.commit() + + logger.info( + f"Audit log created: {action} on {resource_type}", + extra={ + "action": action, + "resource_type": resource_type, + "resource_id": resource_id, + "user_id": user_id, + "status": status, + "request_id": request_id + } + ) + except Exception as e: + logger.error(f"Failed to create audit log: {str(e)}", exc_info=True) + db.rollback() + # Don't raise exception - audit logging failures shouldn't break the app + + +# Global audit service instance +audit_service = AuditService() + diff --git a/Backend/src/services/auth_service.py b/Backend/src/services/auth_service.py index 7e1234eb..823114da 100644 --- a/Backend/src/services/auth_service.py +++ b/Backend/src/services/auth_service.py @@ -5,19 +5,29 @@ import secrets import hashlib from sqlalchemy.orm import Session from typing import Optional +import logging from ..models.user import User from ..models.refresh_token import RefreshToken from ..models.password_reset_token import PasswordResetToken from 
..models.role import Role from ..utils.mailer import send_email +from ..utils.email_templates import ( + welcome_email_template, + password_reset_email_template, + password_changed_email_template +) +from ..config.settings import settings import os +logger = logging.getLogger(__name__) + class AuthService: def __init__(self): - self.jwt_secret = os.getenv("JWT_SECRET") - self.jwt_refresh_secret = os.getenv("JWT_REFRESH_SECRET") + # Use settings, fallback to env vars, then to defaults for development + self.jwt_secret = getattr(settings, 'JWT_SECRET', None) or os.getenv("JWT_SECRET", "dev-secret-key-change-in-production-12345") + self.jwt_refresh_secret = os.getenv("JWT_REFRESH_SECRET") or (self.jwt_secret + "-refresh") self.jwt_expires_in = os.getenv("JWT_EXPIRES_IN", "1h") self.jwt_refresh_expires_in = os.getenv("JWT_REFRESH_EXPIRES_IN", "7d") @@ -70,6 +80,7 @@ class AuthService: "name": user.full_name, "email": user.email, "phone": user.phone, + "avatar": user.avatar, "role": user.role.name if user.role else "customer", "createdAt": user.created_at.isoformat() if user.created_at else None, "updatedAt": user.updated_at.isoformat() if user.updated_at else None, @@ -115,33 +126,16 @@ class AuthService: # Send welcome email (non-blocking) try: - client_url = os.getenv("CLIENT_URL", "http://localhost:5173") + client_url = settings.CLIENT_URL or os.getenv("CLIENT_URL", "http://localhost:5173") + email_html = welcome_email_template(user.full_name, user.email, client_url) await send_email( to=user.email, subject="Welcome to Hotel Booking", - html=f""" -
-                <p>Thank you for registering an account at Hotel Booking.</p>
-                <p>Your account has been successfully created with email: {user.email}</p>
-                <p>You can:</p>
-                <a href="{client_url}/login">Login Now</a>
-                """
+                html=email_html
             )
-                html=f"""
-                <p>You (or someone) has requested to reset your password.</p>
-                <p>Click the link below to reset your password (expires in 1 hour):</p>
-                <a href="{reset_url}">{reset_url}</a>
-                """
+                html=email_html,
+                text=plain_text
             )
+            logger.info(f"Password reset email sent successfully to {user.email} with reset URL: {reset_url}")
         except Exception as e:
-            print(f"Failed to send reset email: {e}")
+            logger.error(f"Failed to send password reset email to {user.email}: {type(e).__name__}: {str(e)}", exc_info=True)
+            # Still return success to prevent email enumeration, but log the error
         return {
             "success": True,
@@ -332,13 +420,16 @@ class AuthService:
         # Send confirmation email (non-blocking)
         try:
+            logger.info(f"Attempting to send password changed confirmation email to {user.email}")
+            email_html = password_changed_email_template(user.email)
             await send_email(
                 to=user.email,
                 subject="Password Changed",
-                html=f"The password for account {user.email} has been changed successfully.
" + html=email_html ) + logger.info(f"Password changed confirmation email sent successfully to {user.email}") except Exception as e: - print(f"Failed to send confirmation email: {e}") + logger.error(f"Failed to send password changed confirmation email to {user.email}: {type(e).__name__}: {str(e)}", exc_info=True) return { "success": True, diff --git a/Backend/src/services/privacy_admin_service.py b/Backend/src/services/privacy_admin_service.py new file mode 100644 index 00000000..610c4b0b --- /dev/null +++ b/Backend/src/services/privacy_admin_service.py @@ -0,0 +1,98 @@ +from sqlalchemy.orm import Session + +from ..models.cookie_policy import CookiePolicy +from ..models.cookie_integration_config import CookieIntegrationConfig +from ..models.user import User +from ..schemas.admin_privacy import ( + CookieIntegrationSettings, + CookiePolicySettings, + PublicPrivacyConfig, +) + + +class PrivacyAdminService: + """ + Service layer for admin-controlled cookie policy and integrations. + """ + + # Policy + @staticmethod + def get_or_create_policy(db: Session) -> CookiePolicy: + policy = db.query(CookiePolicy).first() + if policy: + return policy + + policy = CookiePolicy() + db.add(policy) + db.commit() + db.refresh(policy) + return policy + + @staticmethod + def get_policy_settings(db: Session) -> CookiePolicySettings: + policy = PrivacyAdminService.get_or_create_policy(db) + return CookiePolicySettings( + analytics_enabled=policy.analytics_enabled, + marketing_enabled=policy.marketing_enabled, + preferences_enabled=policy.preferences_enabled, + ) + + @staticmethod + def update_policy( + db: Session, settings: CookiePolicySettings, updated_by: User | None + ) -> CookiePolicy: + policy = PrivacyAdminService.get_or_create_policy(db) + policy.analytics_enabled = settings.analytics_enabled + policy.marketing_enabled = settings.marketing_enabled + policy.preferences_enabled = settings.preferences_enabled + if updated_by: + policy.updated_by_id = updated_by.id + db.add(policy) + db.commit() + db.refresh(policy) + return policy + + # Integrations + @staticmethod + def get_or_create_integrations(db: Session) -> CookieIntegrationConfig: + config = db.query(CookieIntegrationConfig).first() + if config: + return config + config = CookieIntegrationConfig() + db.add(config) + db.commit() + db.refresh(config) + return config + + @staticmethod + def get_integration_settings(db: Session) -> CookieIntegrationSettings: + cfg = PrivacyAdminService.get_or_create_integrations(db) + return CookieIntegrationSettings( + ga_measurement_id=cfg.ga_measurement_id, + fb_pixel_id=cfg.fb_pixel_id, + ) + + @staticmethod + def update_integrations( + db: Session, settings: CookieIntegrationSettings, updated_by: User | None + ) -> CookieIntegrationConfig: + cfg = PrivacyAdminService.get_or_create_integrations(db) + cfg.ga_measurement_id = settings.ga_measurement_id + cfg.fb_pixel_id = settings.fb_pixel_id + if updated_by: + cfg.updated_by_id = updated_by.id + db.add(cfg) + db.commit() + db.refresh(cfg) + return cfg + + @staticmethod + def get_public_privacy_config(db: Session) -> PublicPrivacyConfig: + policy = PrivacyAdminService.get_policy_settings(db) + integrations = PrivacyAdminService.get_integration_settings(db) + return PublicPrivacyConfig(policy=policy, integrations=integrations) + + +privacy_admin_service = PrivacyAdminService() + + diff --git a/Backend/src/utils/__pycache__/email_templates.cpython-312.pyc b/Backend/src/utils/__pycache__/email_templates.cpython-312.pyc new file mode 100644 index 00000000..f446400c Binary 
files /dev/null and b/Backend/src/utils/__pycache__/email_templates.cpython-312.pyc differ
diff --git a/Backend/src/utils/__pycache__/mailer.cpython-312.pyc b/Backend/src/utils/__pycache__/mailer.cpython-312.pyc
index 2ebd5dde..40b320bd 100644
Binary files a/Backend/src/utils/__pycache__/mailer.cpython-312.pyc and b/Backend/src/utils/__pycache__/mailer.cpython-312.pyc differ
diff --git a/Backend/src/utils/email_templates.py b/Backend/src/utils/email_templates.py
new file mode 100644
index 00000000..b70ef0a9
--- /dev/null
+++ b/Backend/src/utils/email_templates.py
@@ -0,0 +1,261 @@
+"""
+Email templates for various notifications
+"""
+from datetime import datetime
+from typing import Optional
+
+
+def get_base_template(content: str, title: str = "Hotel Booking") -> str:
+    """Base HTML email template"""
+    return f"""
+    <html>
+    <head>
+        <title>{title}</title>
+    </head>
+    <body>
+        <table width="100%" cellpadding="0" cellspacing="0">
+            <tr>
+                <td align="center">Hotel Booking</td>
+            </tr>
+            <tr>
+                <td>{content}</td>
+            </tr>
+            <tr>
+                <td align="center">
+                    <p>This is an automated email. Please do not reply.</p>
+                    <p>© {datetime.now().year} Hotel Booking. All rights reserved.</p>
+                </td>
+            </tr>
+        </table>
+    </body>
+    </html>
+    """
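The helpers that follow each build an email-specific content fragment and wrap it with get_base_template; the service layer then passes the resulting HTML to the async send_email utility. A minimal usage sketch under those assumptions (the send_welcome wrapper and the sample address are illustrative, not part of the diff; imports assume Backend/ is the package root):

    import asyncio

    from src.utils.email_templates import welcome_email_template
    from src.utils.mailer import send_email

    async def send_welcome(name: str, email: str, client_url: str) -> None:
        # Build the full HTML document (base wrapper + welcome content)...
        html = welcome_email_template(name, email, client_url)
        # ...and hand it to the SMTP helper, which raises if sending fails.
        await send_email(to=email, subject="Welcome to Hotel Booking", html=html)

    # Requires SMTP_* settings (or the legacy MAIL_* variables) to be configured:
    # asyncio.run(send_welcome("Jane Doe", "jane@example.com", "http://localhost:5173"))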
+
+
+def welcome_email_template(name: str, email: str, client_url: str = "http://localhost:5173") -> str:
+    """Welcome email template"""
+    content = f"""
+    <p>Dear {name},</p>
+    <p>Thank you for registering an account at Hotel Booking.</p>
+    <p>Your account has been successfully created with email: {email}</p>
+    <p>You can:</p>
+    <p>
+        <a href="{client_url}/login">Login Now</a>
+    </p>
+    """
+    return get_base_template(content, "Welcome to Hotel Booking")
+
+
+def password_reset_email_template(reset_url: str) -> str:
+    """Password reset email template"""
+    content = f"""
+    <p>You (or someone) has requested to reset your password.</p>
+    <p>Click the link below to reset your password. This link will expire in 1 hour:</p>
+    <p>
+        <a href="{reset_url}">{reset_url}</a>
+    </p>
+    <p>If you did not request this, please ignore this email.</p>
+    """
+    return get_base_template(content, "Password Reset")
+
+
+def password_changed_email_template(email: str) -> str:
+    """Password changed confirmation email template"""
+    content = f"""
+    <p>The password for account {email} has been changed successfully.</p>
+    <p>If you did not make this change, please contact our support team immediately.</p>
+    """
+    return get_base_template(content, "Password Changed")
+Please pay a deposit of €{deposit_amount:.2f} to confirm your booking.
+Your booking will be confirmed once the deposit is received.
+Dear {guest_name},
+Thank you for your booking! We have received your reservation request.
+ +| Booking Number: | +{booking_number} | +
| Room: | +{room_type} - Room {room_number} | +
| Check-in: | +{check_in} | +
| Check-out: | +{check_out} | +
| Guests: | +{num_guests} | +
| Total Price: | +€{total_price:.2f} | +
Dear {guest_name},
+We have successfully received your payment for booking {booking_number}.
+ +| Booking Number: | +{booking_number} | +
| Amount: | +€{amount:.2f} | +
| Payment Method: | +{payment_method} | +
Your booking is now confirmed. We look forward to hosting you!
+ ++ + View Booking + +
+ """ + return get_base_template(content, "Payment Confirmation") + + +def booking_status_changed_email_template( + booking_number: str, + guest_name: str, + status: str, + client_url: str = "http://localhost:5173" +) -> str: + """Booking status change email template""" + status_colors = { + "confirmed": ("#10B981", "Confirmed"), + "cancelled": ("#EF4444", "Cancelled"), + "checked_in": ("#3B82F6", "Checked In"), + "checked_out": ("#8B5CF6", "Checked Out"), + } + + color, status_text = status_colors.get(status.lower(), ("#6B7280", status.title())) + + content = f""" +Dear {guest_name},
+Your booking status has been updated.
+ +| Booking Number: | +{booking_number} | +
| New Status: | +{status_text} | +
+ + View Booking + +
+ """ + return get_base_template(content, f"Booking {status_text}") + diff --git a/Backend/src/utils/mailer.py b/Backend/src/utils/mailer.py index f52996a9..d059a589 100644 --- a/Backend/src/utils/mailer.py +++ b/Backend/src/utils/mailer.py @@ -2,47 +2,96 @@ import aiosmtplib from email.mime.text import MIMEText from email.mime.multipart import MIMEMultipart import os +import logging +from ..config.settings import settings + +logger = logging.getLogger(__name__) async def send_email(to: str, subject: str, html: str = None, text: str = None): """ Send email using SMTP - Requires MAIL_HOST, MAIL_USER and MAIL_PASS to be set in env. + Uses settings from config/settings.py with fallback to environment variables """ - # Require SMTP credentials to be present - mail_host = os.getenv("MAIL_HOST") - mail_user = os.getenv("MAIL_USER") - mail_pass = os.getenv("MAIL_PASS") + try: + # Get SMTP settings from settings.py, fallback to env vars + mail_host = settings.SMTP_HOST or os.getenv("MAIL_HOST") + mail_user = settings.SMTP_USER or os.getenv("MAIL_USER") + mail_pass = settings.SMTP_PASSWORD or os.getenv("MAIL_PASS") + mail_port = settings.SMTP_PORT or int(os.getenv("MAIL_PORT", "587")) + mail_secure = os.getenv("MAIL_SECURE", "false").lower() == "true" + client_url = settings.CLIENT_URL or os.getenv("CLIENT_URL", "http://localhost:5173") + + # Get from address - prefer settings, then env, then generate from client_url + from_address = settings.SMTP_FROM_EMAIL or os.getenv("MAIL_FROM") + if not from_address: + # Generate from client_url if not set + domain = client_url.replace('https://', '').replace('http://', '').split('/')[0] + from_address = f"no-reply@{domain}" + + # Use from name if available + from_name = settings.SMTP_FROM_NAME or "Hotel Booking" + from_header = f"{from_name} <{from_address}>" - if not (mail_host and mail_user and mail_pass): - raise ValueError( - "SMTP mailer not configured. Set MAIL_HOST, MAIL_USER and MAIL_PASS in env." + if not (mail_host and mail_user and mail_pass): + error_msg = "SMTP mailer not configured. Set SMTP_HOST, SMTP_USER and SMTP_PASSWORD in .env file." 
+ logger.error(error_msg) + raise ValueError(error_msg) + + # Create message + message = MIMEMultipart("alternative") + message["From"] = from_header + message["To"] = to + message["Subject"] = subject + + if text: + message.attach(MIMEText(text, "plain")) + if html: + message.attach(MIMEText(html, "html")) + + # If no content provided, add a default text + if not text and not html: + message.attach(MIMEText("", "plain")) + + # Determine TLS/SSL settings + # For port 587: use STARTTLS (use_tls=False, start_tls=True) + # For port 465: use SSL/TLS (use_tls=True, start_tls=False) + # For port 25: plain (usually not used for authenticated sending) + if mail_port == 465 or mail_secure: + # SSL/TLS connection (port 465) + use_tls = True + start_tls = False + elif mail_port == 587: + # STARTTLS connection (port 587) + use_tls = False + start_tls = True + else: + # Plain connection (port 25 or other) + use_tls = False + start_tls = False + + logger.info(f"Attempting to send email to {to} via {mail_host}:{mail_port} (use_tls: {use_tls}, start_tls: {start_tls})") + + # Send email using SMTP client + smtp_client = aiosmtplib.SMTP( + hostname=mail_host, + port=mail_port, + use_tls=use_tls, + start_tls=start_tls, + username=mail_user, + password=mail_pass, ) - - mail_port = int(os.getenv("MAIL_PORT", "587")) - mail_secure = os.getenv("MAIL_SECURE", "false").lower() == "true" - client_url = os.getenv("CLIENT_URL", "example.com") - from_address = os.getenv("MAIL_FROM", f"no-reply@{client_url.replace('https://', '').replace('http://', '')}") - - # Create message - message = MIMEMultipart("alternative") - message["From"] = from_address - message["To"] = to - message["Subject"] = subject - - if text: - message.attach(MIMEText(text, "plain")) - if html: - message.attach(MIMEText(html, "html")) - - # Send email - await aiosmtplib.send( - message, - hostname=mail_host, - port=mail_port, - use_tls=not mail_secure and mail_port == 587, - start_tls=not mail_secure and mail_port == 587, - username=mail_user, - password=mail_pass, - ) + + try: + await smtp_client.connect() + # Authentication happens automatically if username/password are provided in constructor + await smtp_client.send_message(message) + logger.info(f"Email sent successfully to {to}") + finally: + await smtp_client.quit() + + except Exception as e: + error_msg = f"Failed to send email to {to}: {type(e).__name__}: {str(e)}" + logger.error(error_msg, exc_info=True) + raise diff --git a/Frontend/index.html b/Frontend/index.html index aa52639a..8de18e4a 100644 --- a/Frontend/index.html +++ b/Frontend/index.html @@ -4,7 +4,10 @@ -{message}
++ We use cookies to ensure a seamless booking journey, enhance performance, + and offer curated experiences. Choose a level of personalization that + matches your comfort. +
+Strictly necessary
++ Essential for security, authentication, and core booking flows. + These are always enabled. +
+{text}
+