diff --git
a/Backend/alembic/versions/__pycache__/fff4b67466b3_add_account_lockout_fields_to_users.cpython-312.pyc b/Backend/alembic/versions/__pycache__/fff4b67466b3_add_account_lockout_fields_to_users.cpython-312.pyc new file mode 100644 index 00000000..bcefd045 Binary files /dev/null and b/Backend/alembic/versions/__pycache__/fff4b67466b3_add_account_lockout_fields_to_users.cpython-312.pyc differ diff --git a/Backend/alembic/versions/add_enterprise_features.py b/Backend/alembic/versions/add_enterprise_features.py new file mode 100644 index 00000000..b902117f --- /dev/null +++ b/Backend/alembic/versions/add_enterprise_features.py @@ -0,0 +1,218 @@ +""" +Add enterprise features: approval workflows, GDPR requests, sessions, webhooks, API keys. +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import mysql + +revision = 'add_enterprise_features' +down_revision = 'add_sections_blog' # Depends on latest migration +branch_labels = None +depends_on = None + +def upgrade() -> None: + # Approval workflow table + op.create_table( + 'approval_requests', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('approval_type', sa.Enum('invoice_update', 'payment_refund', 'invoice_mark_paid', 'financial_adjustment', 'user_role_change', 'large_transaction', name='approvaltype'), nullable=False), + sa.Column('status', sa.Enum('pending', 'approved', 'rejected', 'cancelled', name='approvalstatus'), nullable=False), + sa.Column('requested_by', sa.Integer(), nullable=False), + sa.Column('requested_at', sa.DateTime(), nullable=False), + sa.Column('approved_by', sa.Integer(), nullable=True), + sa.Column('approved_at', sa.DateTime(), nullable=True), + sa.Column('rejection_reason', sa.Text(), nullable=True), + sa.Column('resource_type', sa.String(length=50), nullable=False), + sa.Column('resource_id', sa.Integer(), nullable=False), + sa.Column('request_data', sa.JSON(), nullable=True), + sa.Column('current_data', sa.JSON(), nullable=True), + sa.Column('priority', sa.String(length=20), nullable=True), + sa.Column('notes', sa.Text(), nullable=True), + sa.Column('extra_metadata', sa.JSON(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['approved_by'], ['users.id'], ), + sa.ForeignKeyConstraint(['requested_by'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_approval_requests_id'), 'approval_requests', ['id'], unique=False) + op.create_index(op.f('ix_approval_requests_approval_type'), 'approval_requests', ['approval_type'], unique=False) + op.create_index(op.f('ix_approval_requests_status'), 'approval_requests', ['status'], unique=False) + op.create_index(op.f('ix_approval_requests_requested_by'), 'approval_requests', ['requested_by'], unique=False) + op.create_index(op.f('ix_approval_requests_approved_by'), 'approval_requests', ['approved_by'], unique=False) + op.create_index(op.f('ix_approval_requests_resource_type'), 'approval_requests', ['resource_type'], unique=False) + op.create_index(op.f('ix_approval_requests_resource_id'), 'approval_requests', ['resource_id'], unique=False) + + # GDPR requests table + op.create_table( + 'gdpr_requests', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('request_type', sa.Enum('data_export', 'data_deletion', 'data_rectification', 'consent_withdrawal', name='gdprrequesttype'), nullable=False), + sa.Column('status', sa.Enum('pending', 'processing', 
'completed', 'rejected', 'cancelled', name='gdprrequeststatus'), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('user_email', sa.String(length=255), nullable=False), + sa.Column('request_data', sa.JSON(), nullable=True), + sa.Column('verification_token', sa.String(length=255), nullable=True), + sa.Column('verified_at', sa.DateTime(), nullable=True), + sa.Column('processed_by', sa.Integer(), nullable=True), + sa.Column('processed_at', sa.DateTime(), nullable=True), + sa.Column('processing_notes', sa.Text(), nullable=True), + sa.Column('export_file_path', sa.String(length=500), nullable=True), + sa.Column('deletion_log', sa.JSON(), nullable=True), + sa.Column('ip_address', sa.String(length=45), nullable=True), + sa.Column('user_agent', sa.String(length=255), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.Column('expires_at', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['processed_by'], ['users.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_gdpr_requests_id'), 'gdpr_requests', ['id'], unique=False) + op.create_index(op.f('ix_gdpr_requests_request_type'), 'gdpr_requests', ['request_type'], unique=False) + op.create_index(op.f('ix_gdpr_requests_status'), 'gdpr_requests', ['status'], unique=False) + op.create_index(op.f('ix_gdpr_requests_user_id'), 'gdpr_requests', ['user_id'], unique=False) + op.create_index(op.f('ix_gdpr_requests_verification_token'), 'gdpr_requests', ['verification_token'], unique=True) + + # User sessions table + op.create_table( + 'user_sessions', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('session_token', sa.String(length=255), nullable=False), + sa.Column('refresh_token', sa.String(length=255), nullable=True), + sa.Column('ip_address', sa.String(length=45), nullable=True), + sa.Column('user_agent', sa.String(length=500), nullable=True), + sa.Column('device_info', sa.Text(), nullable=True), + sa.Column('is_active', sa.Boolean(), nullable=False), + sa.Column('last_activity', sa.DateTime(), nullable=False), + sa.Column('expires_at', sa.DateTime(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_user_sessions_id'), 'user_sessions', ['id'], unique=False) + op.create_index(op.f('ix_user_sessions_user_id'), 'user_sessions', ['user_id'], unique=False) + op.create_index(op.f('ix_user_sessions_session_token'), 'user_sessions', ['session_token'], unique=True) + op.create_index(op.f('ix_user_sessions_refresh_token'), 'user_sessions', ['refresh_token'], unique=True) + op.create_index(op.f('ix_user_sessions_is_active'), 'user_sessions', ['is_active'], unique=False) + op.create_index(op.f('ix_user_sessions_last_activity'), 'user_sessions', ['last_activity'], unique=False) + op.create_index(op.f('ix_user_sessions_expires_at'), 'user_sessions', ['expires_at'], unique=False) + + # Webhooks table + op.create_table( + 'webhooks', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('url', sa.String(length=500), nullable=False), + sa.Column('secret', sa.String(length=255), nullable=False), + sa.Column('events', sa.JSON(), nullable=False), + 
sa.Column('status', sa.Enum('active', 'inactive', 'paused', name='webhookstatus'), nullable=False), + sa.Column('retry_count', sa.Integer(), nullable=False), + sa.Column('timeout_seconds', sa.Integer(), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('created_by', sa.Integer(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['created_by'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_webhooks_id'), 'webhooks', ['id'], unique=False) + op.create_index(op.f('ix_webhooks_status'), 'webhooks', ['status'], unique=False) + + # Webhook deliveries table + op.create_table( + 'webhook_deliveries', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('webhook_id', sa.Integer(), nullable=False), + sa.Column('event_type', sa.String(length=100), nullable=False), + sa.Column('event_id', sa.String(length=255), nullable=False), + sa.Column('status', sa.Enum('pending', 'success', 'failed', 'retrying', name='webhookdeliverystatus'), nullable=False), + sa.Column('payload', sa.JSON(), nullable=False), + sa.Column('response_status', sa.Integer(), nullable=True), + sa.Column('response_body', sa.Text(), nullable=True), + sa.Column('error_message', sa.Text(), nullable=True), + sa.Column('attempt_count', sa.Integer(), nullable=False), + sa.Column('next_retry_at', sa.DateTime(), nullable=True), + sa.Column('delivered_at', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['webhook_id'], ['webhooks.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_webhook_deliveries_id'), 'webhook_deliveries', ['id'], unique=False) + op.create_index(op.f('ix_webhook_deliveries_webhook_id'), 'webhook_deliveries', ['webhook_id'], unique=False) + op.create_index(op.f('ix_webhook_deliveries_event_type'), 'webhook_deliveries', ['event_type'], unique=False) + op.create_index(op.f('ix_webhook_deliveries_event_id'), 'webhook_deliveries', ['event_id'], unique=False) + op.create_index(op.f('ix_webhook_deliveries_status'), 'webhook_deliveries', ['status'], unique=False) + op.create_index(op.f('ix_webhook_deliveries_created_at'), 'webhook_deliveries', ['created_at'], unique=False) + + # API keys table + op.create_table( + 'api_keys', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('key_hash', sa.String(length=255), nullable=False), + sa.Column('key_prefix', sa.String(length=20), nullable=False), + sa.Column('scopes', sa.JSON(), nullable=False), + sa.Column('rate_limit', sa.Integer(), nullable=False), + sa.Column('is_active', sa.Boolean(), nullable=False), + sa.Column('expires_at', sa.DateTime(), nullable=True), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('last_used_at', sa.DateTime(), nullable=True), + sa.Column('created_by', sa.Integer(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['created_by'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_api_keys_id'), 'api_keys', ['id'], unique=False) + op.create_index(op.f('ix_api_keys_key_hash'), 'api_keys', ['key_hash'], unique=True) + op.create_index(op.f('ix_api_keys_key_prefix'), 'api_keys', ['key_prefix'], unique=False) + 
op.create_index(op.f('ix_api_keys_is_active'), 'api_keys', ['is_active'], unique=False) + op.create_index(op.f('ix_api_keys_expires_at'), 'api_keys', ['expires_at'], unique=False) + +def downgrade() -> None: + op.drop_index(op.f('ix_api_keys_expires_at'), table_name='api_keys') + op.drop_index(op.f('ix_api_keys_is_active'), table_name='api_keys') + op.drop_index(op.f('ix_api_keys_key_prefix'), table_name='api_keys') + op.drop_index(op.f('ix_api_keys_key_hash'), table_name='api_keys') + op.drop_index(op.f('ix_api_keys_id'), table_name='api_keys') + op.drop_table('api_keys') + + op.drop_index(op.f('ix_webhook_deliveries_created_at'), table_name='webhook_deliveries') + op.drop_index(op.f('ix_webhook_deliveries_status'), table_name='webhook_deliveries') + op.drop_index(op.f('ix_webhook_deliveries_event_id'), table_name='webhook_deliveries') + op.drop_index(op.f('ix_webhook_deliveries_event_type'), table_name='webhook_deliveries') + op.drop_index(op.f('ix_webhook_deliveries_webhook_id'), table_name='webhook_deliveries') + op.drop_index(op.f('ix_webhook_deliveries_id'), table_name='webhook_deliveries') + op.drop_table('webhook_deliveries') + + op.drop_index(op.f('ix_webhooks_status'), table_name='webhooks') + op.drop_index(op.f('ix_webhooks_id'), table_name='webhooks') + op.drop_table('webhooks') + + op.drop_index(op.f('ix_user_sessions_expires_at'), table_name='user_sessions') + op.drop_index(op.f('ix_user_sessions_last_activity'), table_name='user_sessions') + op.drop_index(op.f('ix_user_sessions_is_active'), table_name='user_sessions') + op.drop_index(op.f('ix_user_sessions_refresh_token'), table_name='user_sessions') + op.drop_index(op.f('ix_user_sessions_session_token'), table_name='user_sessions') + op.drop_index(op.f('ix_user_sessions_user_id'), table_name='user_sessions') + op.drop_index(op.f('ix_user_sessions_id'), table_name='user_sessions') + op.drop_table('user_sessions') + + op.drop_index(op.f('ix_gdpr_requests_verification_token'), table_name='gdpr_requests') + op.drop_index(op.f('ix_gdpr_requests_user_id'), table_name='gdpr_requests') + op.drop_index(op.f('ix_gdpr_requests_status'), table_name='gdpr_requests') + op.drop_index(op.f('ix_gdpr_requests_request_type'), table_name='gdpr_requests') + op.drop_index(op.f('ix_gdpr_requests_id'), table_name='gdpr_requests') + op.drop_table('gdpr_requests') + + op.drop_index(op.f('ix_approval_requests_resource_id'), table_name='approval_requests') + op.drop_index(op.f('ix_approval_requests_resource_type'), table_name='approval_requests') + op.drop_index(op.f('ix_approval_requests_approved_by'), table_name='approval_requests') + op.drop_index(op.f('ix_approval_requests_requested_by'), table_name='approval_requests') + op.drop_index(op.f('ix_approval_requests_status'), table_name='approval_requests') + op.drop_index(op.f('ix_approval_requests_approval_type'), table_name='approval_requests') + op.drop_index(op.f('ix_approval_requests_id'), table_name='approval_requests') + op.drop_table('approval_requests') + diff --git a/Backend/alembic/versions/dbafe747c931_merge_enterprise_and_borica.py b/Backend/alembic/versions/dbafe747c931_merge_enterprise_and_borica.py new file mode 100644 index 00000000..c49298dc --- /dev/null +++ b/Backend/alembic/versions/dbafe747c931_merge_enterprise_and_borica.py @@ -0,0 +1,25 @@ +"""merge_enterprise_and_borica + +Revision ID: dbafe747c931 +Revises: add_enterprise_features, add_borica_payment_method +Create Date: 2025-12-01 00:42:31.999574 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision 
identifiers, used by Alembic. +revision = 'dbafe747c931' +down_revision = ('add_enterprise_features', 'add_borica_payment_method') +branch_labels = None +depends_on = None + + +def upgrade() -> None: + pass + + +def downgrade() -> None: + pass + diff --git a/Backend/requirements.txt b/Backend/requirements.txt index 4b675f61..be18acbe 100644 --- a/Backend/requirements.txt +++ b/Backend/requirements.txt @@ -35,3 +35,12 @@ pytest-mock==3.12.0 # prometheus-client==0.19.0 # Uncomment for Prometheus metrics # sentry-sdk==1.38.0 # Uncomment for Sentry error tracking +# System Dependencies (not installable via pip - must be installed separately) +# mysqldump: Required for database backup functionality +# Ubuntu/Debian: sudo apt-get install mysql-client +# CentOS/RHEL: sudo yum install mysql (or sudo dnf install mysql) +# macOS: brew install mysql-client +# Alpine: apk add mysql-client +# Or run: bash scripts/install_mysqldump.sh +# Check status: python scripts/check_dependencies.py + diff --git a/Backend/scripts/check_dependencies.py b/Backend/scripts/check_dependencies.py new file mode 100755 index 00000000..208b553c --- /dev/null +++ b/Backend/scripts/check_dependencies.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 +""" +Check for required system dependencies for the Hotel Booking platform. +""" +import subprocess +import shutil +import sys +import platform + +def check_command(command, name, install_instructions): + """Check if a command is available.""" + if shutil.which(command): + version_output = subprocess.run( + [command, '--version'], + capture_output=True, + text=True, + timeout=5 + ) + version = version_output.stdout.split('\n')[0] if version_output.returncode == 0 else 'installed' + print(f"✓ {name}: {version}") + return True + else: + print(f"✗ {name}: NOT FOUND") + print(f" Installation: {install_instructions}") + return False + +def main(): + """Check all dependencies.""" + print("=" * 60) + print("Checking System Dependencies") + print("=" * 60) + print() + + all_ok = True + + # Check mysqldump + os_type = platform.system().lower() + if os_type == 'linux': + distro = platform.linux_distribution()[0].lower() if hasattr(platform, 'linux_distribution') else 'unknown' + if 'ubuntu' in distro or 'debian' in distro: + install_cmd = "sudo apt-get install mysql-client" + elif 'centos' in distro or 'rhel' in distro or 'fedora' in distro: + install_cmd = "sudo yum install mysql (or sudo dnf install mysql)" + else: + install_cmd = "Install MySQL client tools for your distribution" + elif os_type == 'darwin': + install_cmd = "brew install mysql-client" + else: + install_cmd = "Install MySQL client tools for your OS" + + if not check_command('mysqldump', 'mysqldump (MySQL client)', install_cmd): + all_ok = False + + print() + print("=" * 60) + + if all_ok: + print("✓ All dependencies are installed!") + return 0 + else: + print("✗ Some dependencies are missing.") + print() + print("You can install mysqldump using the provided script:") + print(" bash scripts/install_mysqldump.sh") + return 1 + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/Backend/src/__pycache__/main.cpython-312.pyc b/Backend/src/__pycache__/main.cpython-312.pyc index 9aa33fb4..237c874f 100644 Binary files a/Backend/src/__pycache__/main.cpython-312.pyc and b/Backend/src/__pycache__/main.cpython-312.pyc differ diff --git a/Backend/src/auth/models/__pycache__/user_session.cpython-312.pyc b/Backend/src/auth/models/__pycache__/user_session.cpython-312.pyc new file mode 100644 index 00000000..2adb1198 Binary files 
/dev/null and b/Backend/src/auth/models/__pycache__/user_session.cpython-312.pyc differ diff --git a/Backend/src/auth/models/user_session.py b/Backend/src/auth/models/user_session.py new file mode 100644 index 00000000..78913732 --- /dev/null +++ b/Backend/src/auth/models/user_session.py @@ -0,0 +1,43 @@ +""" +User session management model. +""" +from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Boolean, Text +from sqlalchemy.orm import relationship +from datetime import datetime, timedelta +from ...shared.config.database import Base +from ...shared.config.settings import settings + +class UserSession(Base): + __tablename__ = 'user_sessions' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + user_id = Column(Integer, ForeignKey('users.id'), nullable=False, index=True) + session_token = Column(String(255), unique=True, nullable=False, index=True) + refresh_token = Column(String(255), unique=True, nullable=True, index=True) + + # Session details + ip_address = Column(String(45), nullable=True) + user_agent = Column(String(500), nullable=True) + device_info = Column(Text, nullable=True) # JSON string with device details + + # Session status + is_active = Column(Boolean, default=True, nullable=False, index=True) + last_activity = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + + # Expiration + expires_at = Column(DateTime, nullable=False, index=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + + # Relationships + user = relationship('User', foreign_keys=[user_id]) + + @property + def is_expired(self) -> bool: + """Check if session is expired.""" + return datetime.utcnow() > self.expires_at + + @property + def is_valid(self) -> bool: + """Check if session is valid (active and not expired).""" + return self.is_active and not self.is_expired + diff --git a/Backend/src/auth/routes/__pycache__/auth_routes.cpython-312.pyc b/Backend/src/auth/routes/__pycache__/auth_routes.cpython-312.pyc index 6d613b2f..237b97ce 100644 Binary files a/Backend/src/auth/routes/__pycache__/auth_routes.cpython-312.pyc and b/Backend/src/auth/routes/__pycache__/auth_routes.cpython-312.pyc differ diff --git a/Backend/src/auth/routes/__pycache__/session_routes.cpython-312.pyc b/Backend/src/auth/routes/__pycache__/session_routes.cpython-312.pyc new file mode 100644 index 00000000..c3c2c0e8 Binary files /dev/null and b/Backend/src/auth/routes/__pycache__/session_routes.cpython-312.pyc differ diff --git a/Backend/src/auth/routes/__pycache__/user_routes.cpython-312.pyc b/Backend/src/auth/routes/__pycache__/user_routes.cpython-312.pyc index 7bcf76b0..2899b11d 100644 Binary files a/Backend/src/auth/routes/__pycache__/user_routes.cpython-312.pyc and b/Backend/src/auth/routes/__pycache__/user_routes.cpython-312.pyc differ diff --git a/Backend/src/auth/routes/auth_routes.py b/Backend/src/auth/routes/auth_routes.py index 88fc6c6c..4f23935c 100644 --- a/Backend/src/auth/routes/auth_routes.py +++ b/Backend/src/auth/routes/auth_routes.py @@ -14,6 +14,7 @@ from ...analytics.services.audit_service import audit_service from slowapi import Limiter, _rate_limit_exceeded_handler from slowapi.util import get_remote_address from slowapi.errors import RateLimitExceeded +from functools import wraps router = APIRouter(prefix='/auth', tags=['auth']) @@ -282,7 +283,15 @@ async def get_profile(current_user: User=Depends(get_current_user), db: Session= raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e)) @router.put('/profile') 
-async def update_profile(profile_data: UpdateProfileRequest, current_user: User=Depends(get_current_user), db: Session=Depends(get_db)): +async def update_profile( + request: Request, + profile_data: UpdateProfileRequest, + current_user: User=Depends(get_current_user), + db: Session=Depends(get_db) +): + # Rate limiting is handled by global middleware (slowapi) + # The global rate limiter applies default limits to all endpoints + # For stricter limits on profile updates, configure in settings or use endpoint-specific limits try: user = await auth_service.update_profile( db=db, @@ -302,7 +311,7 @@ async def update_profile(profile_data: UpdateProfileRequest, current_user: User= status_code = status.HTTP_404_NOT_FOUND raise HTTPException(status_code=status_code, detail=error_message) except Exception as e: - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f'An error occurred: {str(e)}') + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail='An error occurred while updating your profile. Please try again.') @router.post('/forgot-password', response_model=MessageResponse) async def forgot_password(request: ForgotPasswordRequest, db: Session=Depends(get_db)): diff --git a/Backend/src/auth/routes/session_routes.py b/Backend/src/auth/routes/session_routes.py new file mode 100644 index 00000000..1b3f3c1f --- /dev/null +++ b/Backend/src/auth/routes/session_routes.py @@ -0,0 +1,92 @@ +""" +User session management routes. +""" +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.orm import Session +from ...shared.config.database import get_db +from ...shared.config.logging_config import get_logger +from ...security.middleware.auth import get_current_user +from ...auth.models.user import User +from ...auth.services.session_service import session_service +from ...shared.utils.response_helpers import success_response + +logger = get_logger(__name__) +router = APIRouter(prefix='/sessions', tags=['sessions']) + +@router.get('/') +async def get_my_sessions( + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Get current user's active sessions.""" + try: + sessions = session_service.get_user_sessions( + db=db, + user_id=current_user.id, + active_only=True + ) + + return success_response(data={ + 'sessions': [{ + 'id': s.id, + 'ip_address': s.ip_address, + 'user_agent': s.user_agent, + 'device_info': s.device_info, + 'last_activity': s.last_activity.isoformat() if s.last_activity else None, + 'created_at': s.created_at.isoformat() if s.created_at else None, + 'expires_at': s.expires_at.isoformat() if s.expires_at else None + } for s in sessions] + }) + except Exception as e: + logger.error(f'Error getting sessions: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.delete('/{session_id}') +async def revoke_session( + session_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Revoke a specific session.""" + try: + # Verify session belongs to user + from ...auth.models.user_session import UserSession + session = db.query(UserSession).filter( + UserSession.id == session_id, + UserSession.user_id == current_user.id + ).first() + + if not session: + raise HTTPException(status_code=404, detail='Session not found') + + success = session_service.revoke_session(db=db, session_token=session.session_token) + if not success: + raise HTTPException(status_code=404, detail='Session not found') + + return 
success_response(message='Session revoked successfully') + except HTTPException: + raise + except Exception as e: + logger.error(f'Error revoking session: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/revoke-all') +async def revoke_all_sessions( + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Revoke all sessions for current user.""" + try: + count = session_service.revoke_all_user_sessions( + db=db, + user_id=current_user.id + ) + + return success_response( + data={'revoked_count': count}, + message=f'Revoked {count} session(s)' + ) + except Exception as e: + logger.error(f'Error revoking all sessions: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/auth/routes/user_routes.py b/Backend/src/auth/routes/user_routes.py index 8a280658..e4cf116d 100644 --- a/Backend/src/auth/routes/user_routes.py +++ b/Backend/src/auth/routes/user_routes.py @@ -21,9 +21,10 @@ async def get_users(search: Optional[str]=Query(None), role: Optional[str]=Query if search: query = query.filter(or_(User.full_name.like(f'%{search}%'), User.email.like(f'%{search}%'), User.phone.like(f'%{search}%'))) if role: - role_map = {'admin': 1, 'staff': 2, 'customer': 3, 'accountant': 4} - if role in role_map: - query = query.filter(User.role_id == role_map[role]) + # Query role by name instead of hardcoded IDs for flexibility + role_obj = db.query(Role).filter(Role.name == role).first() + if role_obj: + query = query.filter(User.role_id == role_obj.id) if status_filter: is_active = status_filter == 'active' query = query.filter(User.is_active == is_active) diff --git a/Backend/src/auth/services/__pycache__/auth_service.cpython-312.pyc b/Backend/src/auth/services/__pycache__/auth_service.cpython-312.pyc index 55e62376..a6a5b8ad 100644 Binary files a/Backend/src/auth/services/__pycache__/auth_service.cpython-312.pyc and b/Backend/src/auth/services/__pycache__/auth_service.cpython-312.pyc differ diff --git a/Backend/src/auth/services/__pycache__/session_service.cpython-312.pyc b/Backend/src/auth/services/__pycache__/session_service.cpython-312.pyc new file mode 100644 index 00000000..98e22c24 Binary files /dev/null and b/Backend/src/auth/services/__pycache__/session_service.cpython-312.pyc differ diff --git a/Backend/src/auth/services/auth_service.py b/Backend/src/auth/services/auth_service.py index e328e23a..074efffc 100644 --- a/Backend/src/auth/services/auth_service.py +++ b/Backend/src/auth/services/auth_service.py @@ -372,7 +372,8 @@ class AuthService: if full_name is not None: user.full_name = full_name if email is not None: - + # Normalize email (lowercase and trim) + email = email.lower().strip() existing_user = db.query(User).filter( User.email == email, User.id != user_id diff --git a/Backend/src/auth/services/session_service.py b/Backend/src/auth/services/session_service.py new file mode 100644 index 00000000..cda89bdf --- /dev/null +++ b/Backend/src/auth/services/session_service.py @@ -0,0 +1,149 @@ +""" +User session management service. 
+""" +from sqlalchemy.orm import Session +from typing import Optional, List +from datetime import datetime, timedelta +import secrets +from ..models.user_session import UserSession +from ...shared.config.settings import settings +from ...shared.config.logging_config import get_logger + +logger = get_logger(__name__) + +class SessionService: + """Service for managing user sessions.""" + + @staticmethod + def create_session( + db: Session, + user_id: int, + ip_address: Optional[str] = None, + user_agent: Optional[str] = None, + device_info: Optional[str] = None + ) -> UserSession: + """Create a new user session.""" + session_token = secrets.token_urlsafe(64) + refresh_token = secrets.token_urlsafe(64) + + # Calculate expiration + expires_at = datetime.utcnow() + timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES) + + session = UserSession( + user_id=user_id, + session_token=session_token, + refresh_token=refresh_token, + ip_address=ip_address, + user_agent=user_agent, + device_info=device_info, + expires_at=expires_at + ) + + db.add(session) + db.commit() + db.refresh(session) + + logger.info(f'Session created for user {user_id}') + return session + + @staticmethod + def get_session( + db: Session, + session_token: str + ) -> Optional[UserSession]: + """Get a session by token.""" + return db.query(UserSession).filter( + UserSession.session_token == session_token, + UserSession.is_active == True + ).first() + + @staticmethod + def update_session_activity( + db: Session, + session_token: str + ) -> Optional[UserSession]: + """Update session last activity timestamp.""" + session = SessionService.get_session(db, session_token) + if session and session.is_valid: + session.last_activity = datetime.utcnow() + db.commit() + db.refresh(session) + return session + + @staticmethod + def revoke_session( + db: Session, + session_token: str + ) -> bool: + """Revoke a session.""" + session = db.query(UserSession).filter( + UserSession.session_token == session_token + ).first() + + if session: + session.is_active = False + db.commit() + logger.info(f'Session {session.id} revoked') + return True + return False + + @staticmethod + def revoke_all_user_sessions( + db: Session, + user_id: int, + exclude_token: Optional[str] = None + ) -> int: + """Revoke all sessions for a user, optionally excluding one.""" + query = db.query(UserSession).filter( + UserSession.user_id == user_id, + UserSession.is_active == True + ) + + if exclude_token: + query = query.filter(UserSession.session_token != exclude_token) + + sessions = query.all() + count = len(sessions) + + for session in sessions: + session.is_active = False + + db.commit() + logger.info(f'Revoked {count} sessions for user {user_id}') + return count + + @staticmethod + def get_user_sessions( + db: Session, + user_id: int, + active_only: bool = True + ) -> List[UserSession]: + """Get all sessions for a user.""" + query = db.query(UserSession).filter(UserSession.user_id == user_id) + + if active_only: + query = query.filter( + UserSession.is_active == True, + UserSession.expires_at > datetime.utcnow() + ) + + return query.order_by(UserSession.last_activity.desc()).all() + + @staticmethod + def cleanup_expired_sessions(db: Session) -> int: + """Clean up expired sessions.""" + expired = db.query(UserSession).filter( + UserSession.expires_at < datetime.utcnow(), + UserSession.is_active == True + ).all() + + count = len(expired) + for session in expired: + session.is_active = False + + db.commit() + logger.info(f'Cleaned up {count} expired sessions') + return 
count + +session_service = SessionService() + diff --git a/Backend/src/bookings/routes/__pycache__/booking_routes.cpython-312.pyc b/Backend/src/bookings/routes/__pycache__/booking_routes.cpython-312.pyc index c2754009..191da256 100644 Binary files a/Backend/src/bookings/routes/__pycache__/booking_routes.cpython-312.pyc and b/Backend/src/bookings/routes/__pycache__/booking_routes.cpython-312.pyc differ diff --git a/Backend/src/bookings/routes/booking_routes.py b/Backend/src/bookings/routes/booking_routes.py index c3955ad1..0b742552 100644 --- a/Backend/src/bookings/routes/booking_routes.py +++ b/Backend/src/bookings/routes/booking_routes.py @@ -306,7 +306,9 @@ async def create_booking(booking_data: CreateBookingRequest, current_user: User= if payment_method in ['stripe', 'paypal']: initial_status = BookingStatus.pending - final_notes = notes or '' + # Sanitize user-provided notes to prevent XSS + from html import escape + final_notes = escape(notes) if notes else '' if promotion_code: promotion_note = f'Promotion Code: {promotion_code}' final_notes = f'{promotion_note}\n{final_notes}'.strip() if final_notes else promotion_note @@ -507,23 +509,36 @@ async def create_booking(booking_data: CreateBookingRequest, current_user: User= return success_response(data={'booking': booking_dict}, message=message) except HTTPException: if 'transaction' in locals(): - transaction.rollback() + try: + transaction.rollback() + except Exception: + pass raise except IntegrityError as e: - transaction.rollback() + if 'transaction' in locals(): + try: + transaction.rollback() + except Exception: + pass logger.error(f'Database integrity error during booking creation: {str(e)}') raise HTTPException(status_code=409, detail='Booking conflict detected. Please try again.') except Exception as e: if 'transaction' in locals(): - transaction.rollback() + try: + transaction.rollback() + except Exception: + pass logger.error(f'Error creating booking: {str(e)}', exc_info=True) import logging import traceback logger = logging.getLogger(__name__) logger.error(f'Error creating booking (payment_method: {payment_method}): {str(e)}') logger.error(f'Traceback: {traceback.format_exc()}') - db.rollback() - raise HTTPException(status_code=500, detail=str(e)) + try: + db.rollback() + except Exception: + pass + raise HTTPException(status_code=500, detail='An error occurred while creating the booking. 
Please try again.') @router.get('/{id}') async def get_booking_by_id(id: int, request: Request, current_user: User=Depends(get_current_user), db: Session=Depends(get_db)): @@ -772,7 +787,9 @@ async def update_booking(id: int, booking_data: UpdateBookingRequest, current_us booking.num_guests = booking_data.guest_count if booking_data.notes is not None: - booking.special_requests = booking_data.notes + # Sanitize user-provided notes to prevent XSS + from html import escape + booking.special_requests = escape(booking_data.notes) # Restrict staff from modifying booking prices (only admin can) if booking_data.total_price is not None: @@ -1060,7 +1077,9 @@ async def admin_create_booking(booking_data: AdminCreateBookingRequest, current_ except ValueError: initial_status = BookingStatus.confirmed - final_notes = notes or '' + # Sanitize user-provided notes to prevent XSS + from html import escape + final_notes = escape(notes) if notes else '' if promotion_code: promotion_note = f'Promotion Code: {promotion_code}' final_notes = f'{promotion_note}\n{final_notes}'.strip() if final_notes else promotion_note @@ -1320,15 +1339,24 @@ async def admin_create_booking(booking_data: AdminCreateBookingRequest, current_ ) except HTTPException: if 'transaction' in locals(): - transaction.rollback() + try: + transaction.rollback() + except Exception: + pass raise except IntegrityError as e: if 'transaction' in locals(): - transaction.rollback() + try: + transaction.rollback() + except Exception: + pass logger.error(f'Database integrity error during admin booking creation: {str(e)}') raise HTTPException(status_code=409, detail='Booking conflict detected. Please try again.') except Exception as e: if 'transaction' in locals(): - transaction.rollback() + try: + transaction.rollback() + except Exception: + pass logger.error(f'Error creating booking (admin/staff): {str(e)}', exc_info=True) - raise HTTPException(status_code=500, detail='An error occurred while creating the booking') \ No newline at end of file + raise HTTPException(status_code=500, detail='An error occurred while creating the booking. Please try again.') \ No newline at end of file diff --git a/Backend/src/compliance/__init__.py b/Backend/src/compliance/__init__.py new file mode 100644 index 00000000..2bc7488b --- /dev/null +++ b/Backend/src/compliance/__init__.py @@ -0,0 +1,4 @@ +""" +GDPR compliance module. +""" + diff --git a/Backend/src/compliance/__pycache__/__init__.cpython-312.pyc b/Backend/src/compliance/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..9e495488 Binary files /dev/null and b/Backend/src/compliance/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/src/compliance/models/__pycache__/gdpr_request.cpython-312.pyc b/Backend/src/compliance/models/__pycache__/gdpr_request.cpython-312.pyc new file mode 100644 index 00000000..dc65cf39 Binary files /dev/null and b/Backend/src/compliance/models/__pycache__/gdpr_request.cpython-312.pyc differ diff --git a/Backend/src/compliance/models/gdpr_request.py b/Backend/src/compliance/models/gdpr_request.py new file mode 100644 index 00000000..57085348 --- /dev/null +++ b/Backend/src/compliance/models/gdpr_request.py @@ -0,0 +1,58 @@ +""" +GDPR compliance models for data export and deletion requests. 
+""" +from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Enum, JSON +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ...shared.config.database import Base + +class GDPRRequestType(str, enum.Enum): + data_export = "data_export" + data_deletion = "data_deletion" + data_rectification = "data_rectification" + consent_withdrawal = "consent_withdrawal" + +class GDPRRequestStatus(str, enum.Enum): + pending = "pending" + processing = "processing" + completed = "completed" + rejected = "rejected" + cancelled = "cancelled" + +class GDPRRequest(Base): + __tablename__ = 'gdpr_requests' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + request_type = Column(Enum(GDPRRequestType), nullable=False, index=True) + status = Column(Enum(GDPRRequestStatus), default=GDPRRequestStatus.pending, nullable=False, index=True) + + # User making the request + user_id = Column(Integer, ForeignKey('users.id'), nullable=False, index=True) + user_email = Column(String(255), nullable=False) # Store email even if user is deleted + + # Request details + request_data = Column(JSON, nullable=True) # Additional request parameters + verification_token = Column(String(255), nullable=True, unique=True, index=True) + verified_at = Column(DateTime, nullable=True) + + # Processing + processed_by = Column(Integer, ForeignKey('users.id'), nullable=True) + processed_at = Column(DateTime, nullable=True) + processing_notes = Column(Text, nullable=True) + + # Export/deletion details + export_file_path = Column(String(500), nullable=True) # Path to exported data file + deletion_log = Column(JSON, nullable=True) # Log of what was deleted + + # Metadata + ip_address = Column(String(45), nullable=True) + user_agent = Column(String(255), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + expires_at = Column(DateTime, nullable=True) # For export links expiration + + # Relationships + user = relationship('User', foreign_keys=[user_id]) + processor = relationship('User', foreign_keys=[processed_by]) + diff --git a/Backend/src/compliance/routes/__init__.py b/Backend/src/compliance/routes/__init__.py new file mode 100644 index 00000000..4ee9ca16 --- /dev/null +++ b/Backend/src/compliance/routes/__init__.py @@ -0,0 +1,4 @@ +""" +GDPR compliance routes. +""" + diff --git a/Backend/src/compliance/routes/__pycache__/__init__.cpython-312.pyc b/Backend/src/compliance/routes/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..e891e0b4 Binary files /dev/null and b/Backend/src/compliance/routes/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/src/compliance/routes/__pycache__/gdpr_routes.cpython-312.pyc b/Backend/src/compliance/routes/__pycache__/gdpr_routes.cpython-312.pyc new file mode 100644 index 00000000..77f0e3f2 Binary files /dev/null and b/Backend/src/compliance/routes/__pycache__/gdpr_routes.cpython-312.pyc differ diff --git a/Backend/src/compliance/routes/gdpr_routes.py b/Backend/src/compliance/routes/gdpr_routes.py new file mode 100644 index 00000000..f899aeac --- /dev/null +++ b/Backend/src/compliance/routes/gdpr_routes.py @@ -0,0 +1,272 @@ +""" +GDPR compliance routes for data export and deletion. 
+""" +from fastapi import APIRouter, Depends, HTTPException, Query, Response +from sqlalchemy.orm import Session, noload +from typing import Optional +from ...shared.config.database import get_db +from ...shared.config.logging_config import get_logger +from ...security.middleware.auth import get_current_user, authorize_roles +from ...auth.models.user import User +from ..services.gdpr_service import gdpr_service +from ..models.gdpr_request import GDPRRequest, GDPRRequestType, GDPRRequestStatus +from ...shared.utils.response_helpers import success_response +from fastapi import Request + +logger = get_logger(__name__) +router = APIRouter(prefix='/gdpr', tags=['gdpr']) + +@router.post('/export') +async def request_data_export( + request: Request, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Request export of user's personal data (GDPR).""" + try: + client_ip = request.client.host if request.client else None + user_agent = request.headers.get('User-Agent') + + gdpr_request = await gdpr_service.create_data_export_request( + db=db, + user_id=current_user.id, + ip_address=client_ip, + user_agent=user_agent + ) + + return success_response( + data={ + 'request_id': gdpr_request.id, + 'verification_token': gdpr_request.verification_token, + 'status': gdpr_request.status.value, + 'expires_at': gdpr_request.expires_at.isoformat() if gdpr_request.expires_at else None + }, + message='Data export request created. You will receive an email with download link once ready.' + ) + except Exception as e: + logger.error(f'Error creating data export request: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/export/{request_id}') +async def get_export_data( + request_id: int, + verification_token: str = Query(...), + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Get exported user data.""" + try: + gdpr_request = db.query(GDPRRequest).options( + noload(GDPRRequest.user), + noload(GDPRRequest.processor) + ).filter( + GDPRRequest.id == request_id, + GDPRRequest.user_id == current_user.id, + GDPRRequest.verification_token == verification_token, + GDPRRequest.request_type == GDPRRequestType.data_export + ).first() + + if not gdpr_request: + raise HTTPException(status_code=404, detail='Export request not found or invalid token') + + if gdpr_request.status == GDPRRequestStatus.pending: + # Process export + export_data = await gdpr_service.export_user_data( + db=db, + user_id=current_user.id, + request_id=request_id + ) + return success_response(data=export_data) + elif gdpr_request.status == GDPRRequestStatus.completed and gdpr_request.export_file_path: + # Return file if exists + from pathlib import Path + file_path = Path(gdpr_request.export_file_path) + if file_path.exists(): + return Response( + content=file_path.read_bytes(), + media_type='application/json', + headers={ + 'Content-Disposition': f'attachment; filename="user_data_export_{request_id}.json"' + } + ) + + raise HTTPException(status_code=404, detail='Export file not found') + except HTTPException: + raise + except Exception as e: + logger.error(f'Error getting export data: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/delete') +async def request_data_deletion( + request: Request, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Request deletion of user's personal data (GDPR - Right to be Forgotten).""" + try: + client_ip = 
request.client.host if request.client else None + user_agent = request.headers.get('User-Agent') + + gdpr_request = await gdpr_service.create_data_deletion_request( + db=db, + user_id=current_user.id, + ip_address=client_ip, + user_agent=user_agent + ) + + return success_response( + data={ + 'request_id': gdpr_request.id, + 'verification_token': gdpr_request.verification_token, + 'status': gdpr_request.status.value + }, + message='Data deletion request created. Please verify via email to proceed.' + ) + except Exception as e: + logger.error(f'Error creating data deletion request: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/delete/{request_id}/confirm') +async def confirm_data_deletion( + request_id: int, + verification_token: str = Query(...), + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Confirm and process data deletion request.""" + try: + gdpr_request = db.query(GDPRRequest).options( + noload(GDPRRequest.user), + noload(GDPRRequest.processor) + ).filter( + GDPRRequest.id == request_id, + GDPRRequest.user_id == current_user.id, + GDPRRequest.verification_token == verification_token, + GDPRRequest.request_type == GDPRRequestType.data_deletion, + GDPRRequest.status == GDPRRequestStatus.pending + ).first() + + if not gdpr_request: + raise HTTPException(status_code=404, detail='Deletion request not found or already processed') + + # Process deletion + deletion_log = await gdpr_service.delete_user_data( + db=db, + user_id=current_user.id, + request_id=request_id, + processed_by=current_user.id + ) + + return success_response( + data=deletion_log, + message='Your data has been deleted successfully.' + ) + except HTTPException: + raise + except Exception as e: + logger.error(f'Error processing data deletion: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/requests') +async def get_user_gdpr_requests( + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Get user's GDPR requests.""" + try: + requests = db.query(GDPRRequest).options( + noload(GDPRRequest.user), + noload(GDPRRequest.processor) + ).filter( + GDPRRequest.user_id == current_user.id + ).order_by(GDPRRequest.created_at.desc()).all() + + return success_response(data={ + 'requests': [{ + 'id': req.id, + 'request_type': req.request_type.value, + 'status': req.status.value, + 'created_at': req.created_at.isoformat() if req.created_at else None, + 'processed_at': req.processed_at.isoformat() if req.processed_at else None, + } for req in requests] + }) + except Exception as e: + logger.error(f'Error getting GDPR requests: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/admin/requests') +async def get_all_gdpr_requests( + status: Optional[str] = Query(None), + page: int = Query(1, ge=1), + limit: int = Query(20, ge=1, le=100), + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Get all GDPR requests (admin only).""" + try: + query = db.query(GDPRRequest).options( + noload(GDPRRequest.user), + noload(GDPRRequest.processor) + ) + + if status: + try: + status_enum = GDPRRequestStatus(status) + query = query.filter(GDPRRequest.status == status_enum) + except ValueError: + raise HTTPException(status_code=400, detail=f'Invalid status: {status}') + + total = query.count() + offset = (page - 1) * limit + requests = 
query.order_by(GDPRRequest.created_at.desc()).offset(offset).limit(limit).all() + + return success_response(data={ + 'requests': [{ + 'id': req.id, + 'request_type': req.request_type.value, + 'status': req.status.value, + 'user_email': req.user_email, + 'created_at': req.created_at.isoformat() if req.created_at else None, + 'processed_at': req.processed_at.isoformat() if req.processed_at else None, + } for req in requests], + 'pagination': { + 'page': page, + 'limit': limit, + 'total': total, + 'total_pages': (total + limit - 1) // limit + } + }) + except HTTPException: + raise + except Exception as e: + logger.error(f'Error getting GDPR requests: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.delete('/admin/requests/{request_id}') +async def delete_gdpr_request( + request_id: int, + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Delete a GDPR request (admin only).""" + try: + gdpr_request = db.query(GDPRRequest).options( + noload(GDPRRequest.user), + noload(GDPRRequest.processor) + ).filter(GDPRRequest.id == request_id).first() + + if not gdpr_request: + raise HTTPException(status_code=404, detail='GDPR request not found') + + db.delete(gdpr_request) + db.commit() + + logger.info(f'GDPR request {request_id} deleted by admin {current_user.id}') + return success_response(message='GDPR request deleted successfully') + except HTTPException: + raise + except Exception as e: + logger.error(f'Error deleting GDPR request: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/compliance/services/__pycache__/gdpr_service.cpython-312.pyc b/Backend/src/compliance/services/__pycache__/gdpr_service.cpython-312.pyc new file mode 100644 index 00000000..82d2a2d2 Binary files /dev/null and b/Backend/src/compliance/services/__pycache__/gdpr_service.cpython-312.pyc differ diff --git a/Backend/src/compliance/services/gdpr_service.py b/Backend/src/compliance/services/gdpr_service.py new file mode 100644 index 00000000..d173cd4b --- /dev/null +++ b/Backend/src/compliance/services/gdpr_service.py @@ -0,0 +1,295 @@ +""" +GDPR compliance service for data export and deletion. 
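The GDPR export flow above is deliberately two-step: the POST creates the request and returns a verification token, and the GET either triggers the export (while pending) or streams the saved file (once completed). A minimal client-side sketch, assuming the `/api` prefix from `main.py`, a bearer token accepted by `get_current_user`, and that `success_response` wraps its payload under a `data` key:

```python
# Hypothetical client for the two-step export flow; the base URL, auth scheme,
# and response envelope are assumptions, not part of this diff.
import httpx

BASE_URL = "https://example.com/api"
headers = {"Authorization": "Bearer <access-token>"}  # assumed auth scheme

with httpx.Client(base_url=BASE_URL, headers=headers) as client:
    # Step 1: create the export request; the response carries the verification token.
    created = client.post("/gdpr/export").json()["data"]  # assumed 'data' envelope
    request_id = created["request_id"]
    token = created["verification_token"]

    # Step 2: fetch the export. While the request is pending this triggers the
    # export and returns JSON; once completed it returns the saved file as an
    # attachment (Content-Disposition header).
    export = client.get(
        f"/gdpr/export/{request_id}",
        params={"verification_token": token},
    )
    export.raise_for_status()
    print(export.headers.get("Content-Type"), len(export.content))
```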
+""" +from sqlalchemy.orm import Session +from typing import Dict, Any, Optional, List +from datetime import datetime, timedelta +import json +import secrets +import os +from pathlib import Path +from ..models.gdpr_request import GDPRRequest, GDPRRequestType, GDPRRequestStatus +from ...auth.models.user import User +from ...bookings.models.booking import Booking +from ...payments.models.payment import Payment +from ...payments.models.invoice import Invoice +from ...reviews.models.review import Review +from ...shared.config.logging_config import get_logger +from ...shared.config.settings import settings +from ...analytics.services.audit_service import audit_service + +logger = get_logger(__name__) + +class GDPRService: + """Service for GDPR compliance operations.""" + + EXPORT_EXPIRY_DAYS = 7 # Export links expire after 7 days + + @staticmethod + async def create_data_export_request( + db: Session, + user_id: int, + ip_address: Optional[str] = None, + user_agent: Optional[str] = None + ) -> GDPRRequest: + """Create a data export request.""" + user = db.query(User).filter(User.id == user_id).first() + if not user: + raise ValueError('User not found') + + verification_token = secrets.token_urlsafe(32) + expires_at = datetime.utcnow() + timedelta(days=GDPRService.EXPORT_EXPIRY_DAYS) + + gdpr_request = GDPRRequest( + request_type=GDPRRequestType.data_export, + status=GDPRRequestStatus.pending, + user_id=user_id, + user_email=user.email, + verification_token=verification_token, + ip_address=ip_address, + user_agent=user_agent, + expires_at=expires_at + ) + + db.add(gdpr_request) + db.commit() + db.refresh(gdpr_request) + + # Log GDPR request + await audit_service.log_action( + db=db, + action='gdpr_export_requested', + resource_type='gdpr_request', + user_id=user_id, + resource_id=gdpr_request.id, + ip_address=ip_address, + user_agent=user_agent, + details={'request_type': 'data_export'}, + status='success' + ) + + logger.info(f'GDPR export request created: {gdpr_request.id} for user {user_id}') + return gdpr_request + + @staticmethod + async def create_data_deletion_request( + db: Session, + user_id: int, + ip_address: Optional[str] = None, + user_agent: Optional[str] = None + ) -> GDPRRequest: + """Create a data deletion request (right to be forgotten).""" + user = db.query(User).filter(User.id == user_id).first() + if not user: + raise ValueError('User not found') + + verification_token = secrets.token_urlsafe(32) + + gdpr_request = GDPRRequest( + request_type=GDPRRequestType.data_deletion, + status=GDPRRequestStatus.pending, + user_id=user_id, + user_email=user.email, + verification_token=verification_token, + ip_address=ip_address, + user_agent=user_agent + ) + + db.add(gdpr_request) + db.commit() + db.refresh(gdpr_request) + + # Log GDPR request + await audit_service.log_action( + db=db, + action='gdpr_deletion_requested', + resource_type='gdpr_request', + user_id=user_id, + resource_id=gdpr_request.id, + ip_address=ip_address, + user_agent=user_agent, + details={'request_type': 'data_deletion'}, + status='success' + ) + + logger.info(f'GDPR deletion request created: {gdpr_request.id} for user {user_id}') + return gdpr_request + + @staticmethod + async def export_user_data( + db: Session, + user_id: int, + request_id: Optional[int] = None + ) -> Dict[str, Any]: + """Export all user data in JSON format.""" + user = db.query(User).filter(User.id == user_id).first() + if not user: + raise ValueError('User not found') + + # Collect all user data + export_data = { + 'user': { + 'id': user.id, + 
'email': user.email, + 'full_name': user.full_name, + 'phone': user.phone, + 'address': user.address, + 'currency': getattr(user, 'currency', None), + 'created_at': user.created_at.isoformat() if user.created_at else None, + 'updated_at': user.updated_at.isoformat() if user.updated_at else None, + }, + 'bookings': [], + 'payments': [], + 'invoices': [], + 'reviews': [], + 'export_date': datetime.utcnow().isoformat() + } + + # Get bookings + bookings = db.query(Booking).filter(Booking.user_id == user_id).all() + for booking in bookings: + export_data['bookings'].append({ + 'id': booking.id, + 'booking_number': booking.booking_number, + 'check_in_date': booking.check_in_date.isoformat() if booking.check_in_date else None, + 'check_out_date': booking.check_out_date.isoformat() if booking.check_out_date else None, + 'status': booking.status.value if hasattr(booking.status, 'value') else str(booking.status), + 'total_price': float(booking.total_price) if booking.total_price else None, + 'created_at': booking.created_at.isoformat() if booking.created_at else None, + }) + + # Get payments + payments = db.query(Payment).filter(Payment.user_id == user_id).all() + for payment in payments: + export_data['payments'].append({ + 'id': payment.id, + 'amount': float(payment.amount) if payment.amount else None, + 'payment_method': payment.payment_method.value if hasattr(payment.payment_method, 'value') else str(payment.payment_method), + 'payment_status': payment.payment_status.value if hasattr(payment.payment_status, 'value') else str(payment.payment_status), + 'payment_date': payment.payment_date.isoformat() if payment.payment_date else None, + 'created_at': payment.created_at.isoformat() if payment.created_at else None, + }) + + # Get invoices + invoices = db.query(Invoice).filter(Invoice.user_id == user_id).all() + for invoice in invoices: + export_data['invoices'].append({ + 'id': invoice.id, + 'invoice_number': invoice.invoice_number, + 'total_amount': float(invoice.total_amount) if invoice.total_amount else None, + 'status': invoice.status.value if hasattr(invoice.status, 'value') else str(invoice.status), + 'issue_date': invoice.issue_date.isoformat() if invoice.issue_date else None, + }) + + # Get reviews + reviews = db.query(Review).filter(Review.user_id == user_id).all() + for review in reviews: + export_data['reviews'].append({ + 'id': review.id, + 'rating': review.rating, + 'comment': review.comment, + 'created_at': review.created_at.isoformat() if review.created_at else None, + }) + + # Save export file + if request_id: + export_dir = Path(settings.UPLOAD_DIR) / 'gdpr_exports' + export_dir.mkdir(parents=True, exist_ok=True) + + filename = f'user_{user_id}_export_{datetime.utcnow().strftime("%Y%m%d_%H%M%S")}.json' + file_path = export_dir / filename + + with open(file_path, 'w', encoding='utf-8') as f: + json.dump(export_data, f, indent=2, ensure_ascii=False) + + # Update GDPR request + gdpr_request = db.query(GDPRRequest).filter(GDPRRequest.id == request_id).first() + if gdpr_request: + gdpr_request.export_file_path = str(file_path) + gdpr_request.status = GDPRRequestStatus.completed + gdpr_request.processed_at = datetime.utcnow() + db.commit() + + return export_data + + @staticmethod + async def delete_user_data( + db: Session, + user_id: int, + request_id: Optional[int] = None, + processed_by: Optional[int] = None + ) -> Dict[str, Any]: + """Delete all user data (right to be forgotten).""" + user = db.query(User).filter(User.id == user_id).first() + if not user: + raise ValueError('User not 
found') + + deletion_log = { + 'user_id': user_id, + 'user_email': user.email, + 'deleted_at': datetime.utcnow().isoformat(), + 'deleted_items': [] + } + + # Anonymize bookings (keep for business records but remove personal data) + bookings = db.query(Booking).filter(Booking.user_id == user_id).all() + for booking in bookings: + # Keep booking but anonymize + booking.user_id = None # Or set to a system user + deletion_log['deleted_items'].append(f'booking_{booking.id}_anonymized') + + # Anonymize payments + payments = db.query(Payment).filter(Payment.user_id == user_id).all() + for payment in payments: + payment.user_id = None + deletion_log['deleted_items'].append(f'payment_{payment.id}_anonymized') + + # Anonymize invoices + invoices = db.query(Invoice).filter(Invoice.user_id == user_id).all() + for invoice in invoices: + invoice.user_id = None + invoice.customer_name = 'Deleted User' + invoice.customer_email = 'deleted@example.com' + deletion_log['deleted_items'].append(f'invoice_{invoice.id}_anonymized') + + # Delete reviews + reviews = db.query(Review).filter(Review.user_id == user_id).all() + for review in reviews: + db.delete(review) + deletion_log['deleted_items'].append(f'review_{review.id}_deleted') + + # Deactivate user account + user.is_active = False + user.email = f'deleted_{user.id}@deleted.local' + user.full_name = 'Deleted User' + user.phone = None + user.address = None + + db.commit() + + # Update GDPR request + if request_id: + gdpr_request = db.query(GDPRRequest).filter(GDPRRequest.id == request_id).first() + if gdpr_request: + gdpr_request.status = GDPRRequestStatus.completed + gdpr_request.processed_by = processed_by + gdpr_request.processed_at = datetime.utcnow() + gdpr_request.deletion_log = deletion_log + db.commit() + + # Log deletion + await audit_service.log_action( + db=db, + action='gdpr_data_deleted', + resource_type='gdpr_request', + user_id=processed_by, + resource_id=request_id, + details=deletion_log, + status='success' + ) + + logger.info(f'User data deleted for user {user_id}') + return deletion_log + +gdpr_service = GDPRService() + diff --git a/Backend/src/integrations/__init__.py b/Backend/src/integrations/__init__.py new file mode 100644 index 00000000..17c3191f --- /dev/null +++ b/Backend/src/integrations/__init__.py @@ -0,0 +1,4 @@ +""" +Integration module for webhooks and API keys. +""" + diff --git a/Backend/src/integrations/__pycache__/__init__.cpython-312.pyc b/Backend/src/integrations/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..428681ec Binary files /dev/null and b/Backend/src/integrations/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/src/integrations/models/__pycache__/api_key.cpython-312.pyc b/Backend/src/integrations/models/__pycache__/api_key.cpython-312.pyc new file mode 100644 index 00000000..f2b7e32a Binary files /dev/null and b/Backend/src/integrations/models/__pycache__/api_key.cpython-312.pyc differ diff --git a/Backend/src/integrations/models/__pycache__/webhook.cpython-312.pyc b/Backend/src/integrations/models/__pycache__/webhook.cpython-312.pyc new file mode 100644 index 00000000..af6446d1 Binary files /dev/null and b/Backend/src/integrations/models/__pycache__/webhook.cpython-312.pyc differ diff --git a/Backend/src/integrations/models/api_key.py b/Backend/src/integrations/models/api_key.py new file mode 100644 index 00000000..08b0b9dc --- /dev/null +++ b/Backend/src/integrations/models/api_key.py @@ -0,0 +1,48 @@ +""" +API key models for third-party access. 
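`export_user_data` writes the JSON snapshot under `UPLOAD_DIR/gdpr_exports` and the request carries an `expires_at` seven days out, but nothing in the service removes stale files once the download window closes. A housekeeping sketch, assuming it lives inside the compliance package and that an existing scheduler invokes it periodically:

```python
# Hypothetical cleanup job for expired GDPR export files; the scheduler that
# would call purge_expired_exports() is assumed, not part of the diff.
from datetime import datetime
from pathlib import Path
from sqlalchemy.orm import Session

from ..models.gdpr_request import GDPRRequest, GDPRRequestType, GDPRRequestStatus


def purge_expired_exports(db: Session) -> int:
    """Delete export files whose download window has passed."""
    now = datetime.utcnow()
    expired = db.query(GDPRRequest).filter(
        GDPRRequest.request_type == GDPRRequestType.data_export,
        GDPRRequest.status == GDPRRequestStatus.completed,
        GDPRRequest.expires_at.isnot(None),
        GDPRRequest.expires_at < now,
        GDPRRequest.export_file_path.isnot(None),
    ).all()

    removed = 0
    for req in expired:
        path = Path(req.export_file_path)
        if path.exists():
            path.unlink()  # remove the on-disk export
        req.export_file_path = None  # keep the request row as an audit trace
        removed += 1

    db.commit()
    return removed
```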
+""" +from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Boolean, JSON +from sqlalchemy.orm import relationship +from datetime import datetime, timedelta +from ...shared.config.database import Base + +class APIKey(Base): + __tablename__ = 'api_keys' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + name = Column(String(255), nullable=False) + key_hash = Column(String(255), unique=True, nullable=False, index=True) # Hashed API key + key_prefix = Column(String(20), nullable=False, index=True) # First 8 chars for identification + + # Permissions + scopes = Column(JSON, nullable=False) # List of allowed scopes/permissions + rate_limit = Column(Integer, default=100, nullable=False) # Requests per minute + + # Status + is_active = Column(Boolean, default=True, nullable=False, index=True) + + # Expiration + expires_at = Column(DateTime, nullable=True, index=True) + + # Metadata + description = Column(Text, nullable=True) + last_used_at = Column(DateTime, nullable=True) + created_by = Column(Integer, ForeignKey('users.id'), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + creator = relationship('User', foreign_keys=[created_by]) + + @property + def is_expired(self) -> bool: + """Check if API key is expired.""" + if not self.expires_at: + return False + return datetime.utcnow() > self.expires_at + + @property + def is_valid(self) -> bool: + """Check if API key is valid (active and not expired).""" + return self.is_active and not self.is_expired + diff --git a/Backend/src/integrations/models/webhook.py b/Backend/src/integrations/models/webhook.py new file mode 100644 index 00000000..77c0ac7f --- /dev/null +++ b/Backend/src/integrations/models/webhook.py @@ -0,0 +1,84 @@ +""" +Webhook models for external integrations. 
+""" +from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Boolean, Enum, JSON +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ...shared.config.database import Base + +class WebhookEventType(str, enum.Enum): + booking_created = "booking.created" + booking_updated = "booking.updated" + booking_cancelled = "booking.cancelled" + payment_completed = "payment.completed" + payment_failed = "payment.failed" + invoice_created = "invoice.created" + invoice_paid = "invoice.paid" + user_created = "user.created" + user_updated = "user.updated" + +class WebhookStatus(str, enum.Enum): + active = "active" + inactive = "inactive" + paused = "paused" + +class WebhookDeliveryStatus(str, enum.Enum): + pending = "pending" + success = "success" + failed = "failed" + retrying = "retrying" + +class Webhook(Base): + __tablename__ = 'webhooks' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + name = Column(String(255), nullable=False) + url = Column(String(500), nullable=False) + secret = Column(String(255), nullable=False) # For signature verification + + # Event subscriptions + events = Column(JSON, nullable=False) # List of event types + + # Status + status = Column(Enum(WebhookStatus), default=WebhookStatus.active, nullable=False, index=True) + + # Configuration + retry_count = Column(Integer, default=3, nullable=False) + timeout_seconds = Column(Integer, default=30, nullable=False) + + # Metadata + description = Column(Text, nullable=True) + created_by = Column(Integer, ForeignKey('users.id'), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + + # Relationships + creator = relationship('User', foreign_keys=[created_by]) + +class WebhookDelivery(Base): + __tablename__ = 'webhook_deliveries' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + webhook_id = Column(Integer, ForeignKey('webhooks.id'), nullable=False, index=True) + event_type = Column(String(100), nullable=False, index=True) + event_id = Column(String(255), nullable=False, index=True) + + # Delivery details + status = Column(Enum(WebhookDeliveryStatus), default=WebhookDeliveryStatus.pending, nullable=False, index=True) + payload = Column(JSON, nullable=False) + response_status = Column(Integer, nullable=True) + response_body = Column(Text, nullable=True) + error_message = Column(Text, nullable=True) + + # Retry information + attempt_count = Column(Integer, default=0, nullable=False) + next_retry_at = Column(DateTime, nullable=True) + + # Timestamps + delivered_at = Column(DateTime, nullable=True) + created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + + # Relationships + webhook = relationship('Webhook', foreign_keys=[webhook_id]) + diff --git a/Backend/src/integrations/routes/__init__.py b/Backend/src/integrations/routes/__init__.py new file mode 100644 index 00000000..ea18ef22 --- /dev/null +++ b/Backend/src/integrations/routes/__init__.py @@ -0,0 +1,4 @@ +""" +Integration routes for webhooks and API keys. 
+""" + diff --git a/Backend/src/integrations/routes/__pycache__/__init__.cpython-312.pyc b/Backend/src/integrations/routes/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 00000000..d547dd0b Binary files /dev/null and b/Backend/src/integrations/routes/__pycache__/__init__.cpython-312.pyc differ diff --git a/Backend/src/integrations/routes/__pycache__/api_key_routes.cpython-312.pyc b/Backend/src/integrations/routes/__pycache__/api_key_routes.cpython-312.pyc new file mode 100644 index 00000000..08b04694 Binary files /dev/null and b/Backend/src/integrations/routes/__pycache__/api_key_routes.cpython-312.pyc differ diff --git a/Backend/src/integrations/routes/__pycache__/webhook_routes.cpython-312.pyc b/Backend/src/integrations/routes/__pycache__/webhook_routes.cpython-312.pyc new file mode 100644 index 00000000..938bc1c1 Binary files /dev/null and b/Backend/src/integrations/routes/__pycache__/webhook_routes.cpython-312.pyc differ diff --git a/Backend/src/integrations/routes/api_key_routes.py b/Backend/src/integrations/routes/api_key_routes.py new file mode 100644 index 00000000..4905803a --- /dev/null +++ b/Backend/src/integrations/routes/api_key_routes.py @@ -0,0 +1,165 @@ +""" +API key management routes. +""" +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.orm import Session +from typing import List, Optional +from datetime import datetime +from ...shared.config.database import get_db +from ...shared.config.logging_config import get_logger +from ...security.middleware.auth import get_current_user, authorize_roles +from ...auth.models.user import User +from ..services.api_key_service import api_key_service +from ..models.api_key import APIKey +from ...shared.utils.response_helpers import success_response +from pydantic import BaseModel + +logger = get_logger(__name__) +router = APIRouter(prefix='/api-keys', tags=['api-keys']) + +class CreateAPIKeyRequest(BaseModel): + name: str + scopes: List[str] + description: Optional[str] = None + rate_limit: int = 100 + expires_at: Optional[str] = None + +class UpdateAPIKeyRequest(BaseModel): + name: Optional[str] = None + scopes: Optional[List[str]] = None + description: Optional[str] = None + rate_limit: Optional[int] = None + expires_at: Optional[str] = None + +@router.post('/') +async def create_api_key( + key_data: CreateAPIKeyRequest, + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Create a new API key.""" + try: + expires_at = None + if key_data.expires_at: + expires_at = datetime.fromisoformat(key_data.expires_at.replace('Z', '+00:00')) + + api_key, plain_key = api_key_service.create_api_key( + db=db, + name=key_data.name, + scopes=key_data.scopes, + created_by=current_user.id, + description=key_data.description, + rate_limit=key_data.rate_limit, + expires_at=expires_at + ) + + return success_response( + data={ + 'api_key': { + 'id': api_key.id, + 'name': api_key.name, + 'key_prefix': api_key.key_prefix, + 'scopes': api_key.scopes, + 'rate_limit': api_key.rate_limit, + 'expires_at': api_key.expires_at.isoformat() if api_key.expires_at else None + }, + 'key': plain_key # Return plain key only on creation + }, + message='API key created successfully. Save this key securely - it will not be shown again.' 
+ ) + except Exception as e: + logger.error(f'Error creating API key: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/') +async def get_api_keys( + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Get all API keys.""" + try: + api_keys = db.query(APIKey).order_by(APIKey.created_at.desc()).all() + + return success_response(data={ + 'api_keys': [{ + 'id': k.id, + 'name': k.name, + 'key_prefix': k.key_prefix, + 'scopes': k.scopes, + 'rate_limit': k.rate_limit, + 'is_active': k.is_active, + 'last_used_at': k.last_used_at.isoformat() if k.last_used_at else None, + 'expires_at': k.expires_at.isoformat() if k.expires_at else None, + 'created_at': k.created_at.isoformat() if k.created_at else None + } for k in api_keys] + }) + except Exception as e: + logger.error(f'Error getting API keys: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.put('/{key_id}') +async def update_api_key( + key_id: int, + key_data: UpdateAPIKeyRequest, + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Update an API key.""" + try: + expires_at = None + if key_data.expires_at: + expires_at = datetime.fromisoformat(key_data.expires_at.replace('Z', '+00:00')) + + api_key = api_key_service.update_api_key( + db=db, + key_id=key_id, + name=key_data.name, + scopes=key_data.scopes, + description=key_data.description, + rate_limit=key_data.rate_limit, + expires_at=expires_at + ) + + if not api_key: + raise HTTPException(status_code=404, detail='API key not found') + + return success_response( + data={ + 'api_key': { + 'id': api_key.id, + 'name': api_key.name, + 'key_prefix': api_key.key_prefix, + 'scopes': api_key.scopes, + 'rate_limit': api_key.rate_limit, + 'is_active': api_key.is_active, + 'expires_at': api_key.expires_at.isoformat() if api_key.expires_at else None, + 'created_at': api_key.created_at.isoformat() if api_key.created_at else None + } + }, + message='API key updated successfully' + ) + except HTTPException: + raise + except Exception as e: + logger.error(f'Error updating API key: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.delete('/{key_id}') +async def revoke_api_key( + key_id: int, + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Revoke an API key.""" + try: + success = api_key_service.revoke_api_key(db=db, key_id=key_id) + if not success: + raise HTTPException(status_code=404, detail='API key not found') + + return success_response(message='API key revoked successfully') + except HTTPException: + raise + except Exception as e: + logger.error(f'Error revoking API key: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/integrations/routes/webhook_routes.py b/Backend/src/integrations/routes/webhook_routes.py new file mode 100644 index 00000000..f87830fa --- /dev/null +++ b/Backend/src/integrations/routes/webhook_routes.py @@ -0,0 +1,199 @@ +""" +Webhook management routes. 
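Because only the SHA-256 hash is persisted, the plain key appears exactly once, in the creation response. A hypothetical admin-side call, assuming the `/api` prefix, an admin bearer token, and the `data` envelope from `success_response`:

```python
# Hypothetical admin-side sketch: store the plain key immediately, it is not retrievable later.
import httpx

resp = httpx.post(
    "https://example.com/api/api-keys/",
    headers={"Authorization": "Bearer <admin-access-token>"},
    json={
        "name": "channel-manager",
        "scopes": ["bookings:read", "bookings:write"],
        "rate_limit": 120,
        # Naive UTC timestamp, matching the model's datetime.utcnow() comparisons.
        "expires_at": "2026-01-01T00:00:00",
    },
)
resp.raise_for_status()
data = resp.json()["data"]                   # assumed 'data' envelope
plain_key = data["key"]                      # e.g. "hb_..." - shown only once
key_prefix = data["api_key"]["key_prefix"]   # safe to log for identification
```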
+""" +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlalchemy.orm import Session +from typing import List, Optional +from ...shared.config.database import get_db +from ...shared.config.logging_config import get_logger +from ...security.middleware.auth import get_current_user, authorize_roles +from ...auth.models.user import User +from ..services.webhook_service import webhook_service +from ..models.webhook import Webhook, WebhookDelivery, WebhookEventType, WebhookStatus +from ...shared.utils.response_helpers import success_response +from pydantic import BaseModel, HttpUrl + +logger = get_logger(__name__) +router = APIRouter(prefix='/webhooks', tags=['webhooks']) + +class CreateWebhookRequest(BaseModel): + name: str + url: str + events: List[str] + description: Optional[str] = None + retry_count: int = 3 + timeout_seconds: int = 30 + +@router.post('/') +async def create_webhook( + webhook_data: CreateWebhookRequest, + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Create a new webhook.""" + try: + webhook = webhook_service.create_webhook( + db=db, + name=webhook_data.name, + url=webhook_data.url, + events=webhook_data.events, + created_by=current_user.id, + description=webhook_data.description, + retry_count=webhook_data.retry_count, + timeout_seconds=webhook_data.timeout_seconds + ) + + return success_response( + data={'webhook': { + 'id': webhook.id, + 'name': webhook.name, + 'url': webhook.url, + 'events': webhook.events, + 'status': webhook.status.value, + 'secret': webhook.secret # Return secret only on creation + }}, + message='Webhook created successfully' + ) + except Exception as e: + logger.error(f'Error creating webhook: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/') +async def get_webhooks( + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Get all webhooks.""" + try: + webhooks = db.query(Webhook).order_by(Webhook.created_at.desc()).all() + + return success_response(data={ + 'webhooks': [{ + 'id': w.id, + 'name': w.name, + 'url': w.url, + 'events': w.events, + 'status': w.status.value, + 'created_at': w.created_at.isoformat() if w.created_at else None + } for w in webhooks] + }) + except Exception as e: + logger.error(f'Error getting webhooks: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/{webhook_id}/deliveries') +async def get_webhook_deliveries( + webhook_id: int, + page: int = Query(1, ge=1), + limit: int = Query(50, ge=1, le=100), + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Get webhook delivery history.""" + try: + offset = (page - 1) * limit + deliveries = db.query(WebhookDelivery).filter( + WebhookDelivery.webhook_id == webhook_id + ).order_by(WebhookDelivery.created_at.desc()).offset(offset).limit(limit).all() + + total = db.query(WebhookDelivery).filter( + WebhookDelivery.webhook_id == webhook_id + ).count() + + return success_response(data={ + 'deliveries': [{ + 'id': d.id, + 'event_type': d.event_type, + 'status': d.status.value, + 'response_status': d.response_status, + 'attempt_count': d.attempt_count, + 'created_at': d.created_at.isoformat() if d.created_at else None, + 'delivered_at': d.delivered_at.isoformat() if d.delivered_at else None + } for d in deliveries], + 'pagination': { + 'page': page, + 'limit': limit, + 'total': total, + 'total_pages': (total + limit - 1) // limit + } + }) + 
except Exception as e: + logger.error(f'Error getting webhook deliveries: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +class UpdateWebhookRequest(BaseModel): + name: Optional[str] = None + url: Optional[str] = None + events: Optional[List[str]] = None + description: Optional[str] = None + status: Optional[str] = None + retry_count: Optional[int] = None + timeout_seconds: Optional[int] = None + +@router.put('/{webhook_id}') +async def update_webhook( + webhook_id: int, + webhook_data: UpdateWebhookRequest, + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Update a webhook.""" + try: + status_enum = None + if webhook_data.status: + try: + status_enum = WebhookStatus(webhook_data.status) + except ValueError: + raise HTTPException(status_code=400, detail=f'Invalid status: {webhook_data.status}') + + webhook = webhook_service.update_webhook( + db=db, + webhook_id=webhook_id, + name=webhook_data.name, + url=webhook_data.url, + events=webhook_data.events, + description=webhook_data.description, + status=status_enum, + retry_count=webhook_data.retry_count, + timeout_seconds=webhook_data.timeout_seconds + ) + + if not webhook: + raise HTTPException(status_code=404, detail='Webhook not found') + + return success_response( + data={'webhook': { + 'id': webhook.id, + 'name': webhook.name, + 'url': webhook.url, + 'events': webhook.events, + 'status': webhook.status.value, + 'created_at': webhook.created_at.isoformat() if webhook.created_at else None + }}, + message='Webhook updated successfully' + ) + except HTTPException: + raise + except Exception as e: + logger.error(f'Error updating webhook: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.delete('/{webhook_id}') +async def delete_webhook( + webhook_id: int, + current_user: User = Depends(authorize_roles('admin')), + db: Session = Depends(get_db) +): + """Delete a webhook.""" + try: + success = webhook_service.delete_webhook(db=db, webhook_id=webhook_id) + if not success: + raise HTTPException(status_code=404, detail='Webhook not found') + + return success_response(message='Webhook deleted successfully') + except HTTPException: + raise + except Exception as e: + logger.error(f'Error deleting webhook: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/integrations/services/__pycache__/api_key_service.cpython-312.pyc b/Backend/src/integrations/services/__pycache__/api_key_service.cpython-312.pyc new file mode 100644 index 00000000..d0924059 Binary files /dev/null and b/Backend/src/integrations/services/__pycache__/api_key_service.cpython-312.pyc differ diff --git a/Backend/src/integrations/services/__pycache__/webhook_service.cpython-312.pyc b/Backend/src/integrations/services/__pycache__/webhook_service.cpython-312.pyc new file mode 100644 index 00000000..6a9af2a5 Binary files /dev/null and b/Backend/src/integrations/services/__pycache__/webhook_service.cpython-312.pyc differ diff --git a/Backend/src/integrations/services/api_key_service.py b/Backend/src/integrations/services/api_key_service.py new file mode 100644 index 00000000..0ec2f602 --- /dev/null +++ b/Backend/src/integrations/services/api_key_service.py @@ -0,0 +1,133 @@ +""" +API key service for third-party access management. 
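The diff defines key issuance, hashing, and `verify_api_key` (below), but not how third parties present their keys on requests. One plausible wiring, sketched here with an assumed `X-API-Key` header and a simple scope check, is a reusable FastAPI dependency around the service:

```python
# Hedged sketch of consuming api_key_service.verify_api_key as a dependency.
# The X-API-Key header name and the scope check are assumptions; module
# placement alongside the integration routes is also assumed.
from fastapi import Depends, Header, HTTPException
from sqlalchemy.orm import Session

from ...shared.config.database import get_db
from ..services.api_key_service import api_key_service


def require_api_key(required_scope: str):
    def dependency(
        x_api_key: str = Header(...),
        db: Session = Depends(get_db),
    ):
        key = api_key_service.verify_api_key(db=db, api_key=x_api_key)
        if key is None:
            raise HTTPException(status_code=401, detail="Invalid or expired API key")
        if required_scope not in (key.scopes or []):
            raise HTTPException(status_code=403, detail="API key missing required scope")
        return key

    return dependency


# Usage on a route: current_key = Depends(require_api_key("bookings:read"))
```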
+""" +from sqlalchemy.orm import Session +from typing import List, Optional +from datetime import datetime +import secrets +import hashlib +from ..models.api_key import APIKey +from ...shared.config.logging_config import get_logger + +logger = get_logger(__name__) + +class APIKeyService: + """Service for managing API keys.""" + + @staticmethod + def generate_api_key() -> tuple[str, str]: + """Generate a new API key and return (key, hash).""" + # Generate key in format: hb_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + key = f"hb_{secrets.token_urlsafe(32)}" + key_hash = hashlib.sha256(key.encode('utf-8')).hexdigest() + key_prefix = key[:11] # "hb_" + 8 chars + return key, key_hash, key_prefix + + @staticmethod + def create_api_key( + db: Session, + name: str, + scopes: List[str], + created_by: int, + description: Optional[str] = None, + rate_limit: int = 100, + expires_at: Optional[datetime] = None + ) -> tuple[APIKey, str]: + """Create a new API key and return (api_key_model, plain_key).""" + key, key_hash, key_prefix = APIKeyService.generate_api_key() + + api_key = APIKey( + name=name, + key_hash=key_hash, + key_prefix=key_prefix, + scopes=scopes, + rate_limit=rate_limit, + created_by=created_by, + description=description, + expires_at=expires_at + ) + + db.add(api_key) + db.commit() + db.refresh(api_key) + + logger.info(f'API key created: {api_key.id} - {name}') + return api_key, key # Return plain key only once + + @staticmethod + def verify_api_key(db: Session, api_key: str) -> Optional[APIKey]: + """Verify an API key and return the APIKey model if valid.""" + key_hash = hashlib.sha256(api_key.encode('utf-8')).hexdigest() + + api_key_model = db.query(APIKey).filter( + APIKey.key_hash == key_hash, + APIKey.is_active == True + ).first() + + if api_key_model and api_key_model.is_valid: + # Update last used timestamp + api_key_model.last_used_at = datetime.utcnow() + db.commit() + return api_key_model + + return None + + @staticmethod + def revoke_api_key(db: Session, key_id: int) -> bool: + """Revoke an API key.""" + api_key = db.query(APIKey).filter(APIKey.id == key_id).first() + if api_key: + api_key.is_active = False + db.commit() + logger.info(f'API key {key_id} revoked') + return True + return False + + @staticmethod + def get_user_api_keys( + db: Session, + created_by: int, + active_only: bool = True + ) -> List[APIKey]: + """Get all API keys created by a user.""" + query = db.query(APIKey).filter(APIKey.created_by == created_by) + + if active_only: + query = query.filter(APIKey.is_active == True) + + return query.order_by(APIKey.created_at.desc()).all() + + @staticmethod + def update_api_key( + db: Session, + key_id: int, + name: Optional[str] = None, + scopes: Optional[List[str]] = None, + description: Optional[str] = None, + rate_limit: Optional[int] = None, + expires_at: Optional[datetime] = None + ) -> Optional[APIKey]: + """Update an API key.""" + api_key = db.query(APIKey).filter(APIKey.id == key_id).first() + if not api_key: + return None + + if name is not None: + api_key.name = name + if scopes is not None: + api_key.scopes = scopes + if description is not None: + api_key.description = description + if rate_limit is not None: + api_key.rate_limit = rate_limit + if expires_at is not None: + api_key.expires_at = expires_at + + db.commit() + db.refresh(api_key) + + logger.info(f'API key updated: {api_key.id} - {api_key.name}') + return api_key + +api_key_service = APIKeyService() + diff --git a/Backend/src/integrations/services/webhook_service.py 
b/Backend/src/integrations/services/webhook_service.py new file mode 100644 index 00000000..6d05ae70 --- /dev/null +++ b/Backend/src/integrations/services/webhook_service.py @@ -0,0 +1,218 @@ +""" +Webhook service for external integrations. +""" +from sqlalchemy.orm import Session +from typing import List, Dict, Any, Optional +from datetime import datetime, timedelta +import secrets +import httpx +import hmac +import hashlib +import json +from ..models.webhook import Webhook, WebhookDelivery, WebhookEventType, WebhookStatus, WebhookDeliveryStatus +from ...shared.config.logging_config import get_logger +from ...analytics.services.audit_service import audit_service + +logger = get_logger(__name__) + +class WebhookService: + """Service for managing webhooks.""" + + @staticmethod + def create_webhook( + db: Session, + name: str, + url: str, + events: List[str], + created_by: int, + description: Optional[str] = None, + retry_count: int = 3, + timeout_seconds: int = 30 + ) -> Webhook: + """Create a new webhook.""" + secret = secrets.token_urlsafe(32) + + webhook = Webhook( + name=name, + url=url, + secret=secret, + events=events, + created_by=created_by, + description=description, + retry_count=retry_count, + timeout_seconds=timeout_seconds + ) + + db.add(webhook) + db.commit() + db.refresh(webhook) + + logger.info(f'Webhook created: {webhook.id} - {name}') + return webhook + + @staticmethod + def update_webhook( + db: Session, + webhook_id: int, + name: Optional[str] = None, + url: Optional[str] = None, + events: Optional[List[str]] = None, + description: Optional[str] = None, + status: Optional[WebhookStatus] = None, + retry_count: Optional[int] = None, + timeout_seconds: Optional[int] = None + ) -> Optional[Webhook]: + """Update a webhook.""" + webhook = db.query(Webhook).filter(Webhook.id == webhook_id).first() + if not webhook: + return None + + if name is not None: + webhook.name = name + if url is not None: + webhook.url = url + if events is not None: + webhook.events = events + if description is not None: + webhook.description = description + if status is not None: + webhook.status = status + if retry_count is not None: + webhook.retry_count = retry_count + if timeout_seconds is not None: + webhook.timeout_seconds = timeout_seconds + + db.commit() + db.refresh(webhook) + + logger.info(f'Webhook updated: {webhook.id} - {webhook.name}') + return webhook + + @staticmethod + def delete_webhook(db: Session, webhook_id: int) -> bool: + """Delete a webhook.""" + webhook = db.query(Webhook).filter(Webhook.id == webhook_id).first() + if not webhook: + return False + + db.delete(webhook) + db.commit() + + logger.info(f'Webhook deleted: {webhook_id}') + return True + + @staticmethod + def generate_signature(payload: str, secret: str) -> str: + """Generate HMAC signature for webhook payload.""" + return hmac.new( + secret.encode('utf-8'), + payload.encode('utf-8'), + hashlib.sha256 + ).hexdigest() + + @staticmethod + async def deliver_webhook( + db: Session, + event_type: str, + event_id: str, + payload: Dict[str, Any] + ) -> List[WebhookDelivery]: + """Deliver webhook event to all subscribed webhooks.""" + # Find active webhooks subscribed to this event + webhooks = db.query(Webhook).filter( + Webhook.status == WebhookStatus.active, + Webhook.events.contains([event_type]) + ).all() + + deliveries = [] + + for webhook in webhooks: + delivery = WebhookDelivery( + webhook_id=webhook.id, + event_type=event_type, + event_id=event_id, + payload=payload, + status=WebhookDeliveryStatus.pending + ) + 
db.add(delivery) + db.commit() + db.refresh(delivery) + + # Deliver asynchronously + try: + await WebhookService._send_webhook(webhook, delivery, payload) + except Exception as e: + logger.error(f'Error delivering webhook {delivery.id}: {str(e)}') + + deliveries.append(delivery) + + return deliveries + + @staticmethod + async def _send_webhook( + webhook: Webhook, + delivery: WebhookDelivery, + payload: Dict[str, Any] + ): + """Send webhook HTTP request.""" + payload_str = json.dumps(payload) + signature = WebhookService.generate_signature(payload_str, webhook.secret) + + headers = { + 'Content-Type': 'application/json', + 'X-Webhook-Signature': signature, + 'X-Webhook-Event': delivery.event_type, + 'X-Webhook-Id': str(delivery.id) + } + + try: + async with httpx.AsyncClient(timeout=webhook.timeout_seconds) as client: + response = await client.post( + webhook.url, + content=payload_str, + headers=headers + ) + + delivery.response_status = response.status_code + delivery.response_body = response.text[:1000] # Limit response body size + delivery.attempt_count += 1 + + if response.status_code >= 200 and response.status_code < 300: + delivery.status = WebhookDeliveryStatus.success + delivery.delivered_at = datetime.utcnow() + else: + delivery.status = WebhookDeliveryStatus.failed + delivery.error_message = f'HTTP {response.status_code}' + + # Schedule retry if attempts remaining + if delivery.attempt_count < webhook.retry_count: + delivery.status = WebhookDeliveryStatus.retrying + delivery.next_retry_at = datetime.utcnow() + timedelta( + minutes=2 ** delivery.attempt_count # Exponential backoff + ) + except Exception as e: + delivery.status = WebhookDeliveryStatus.failed + delivery.error_message = str(e)[:500] + delivery.attempt_count += 1 + + # Schedule retry if attempts remaining + if delivery.attempt_count < webhook.retry_count: + delivery.status = WebhookDeliveryStatus.retrying + delivery.next_retry_at = datetime.utcnow() + timedelta( + minutes=2 ** delivery.attempt_count + ) + + # Update delivery in database + from ...shared.config.database import SessionLocal + db = SessionLocal() + try: + db.merge(delivery) + db.commit() + except Exception as e: + logger.error(f'Error updating webhook delivery: {str(e)}') + db.rollback() + finally: + db.close() + +webhook_service = WebhookService() + diff --git a/Backend/src/main.py b/Backend/src/main.py index b48a45cc..ea098347 100644 --- a/Backend/src/main.py +++ b/Backend/src/main.py @@ -49,6 +49,9 @@ from .content.routes import privacy_routes app = FastAPI(title=settings.APP_NAME, description='Enterprise-grade Hotel Booking API', version=settings.APP_VERSION, docs_url='/api/docs' if not settings.is_production else None, redoc_url='/api/redoc' if not settings.is_production else None, openapi_url='/api/openapi.json' if not settings.is_production else None) app.add_middleware(RequestIDMiddleware) app.add_middleware(CookieConsentMiddleware) +# Add API versioning middleware +from .shared.middleware.api_versioning import APIVersioningMiddleware +app.add_middleware(APIVersioningMiddleware, default_version='v1') if settings.REQUEST_TIMEOUT > 0: app.add_middleware(TimeoutMiddleware) app.add_middleware(SecurityHeadersMiddleware) @@ -61,10 +64,12 @@ if settings.IP_WHITELIST_ENABLED: app.add_middleware(AdminIPWhitelistMiddleware) logger.info(f'Admin IP whitelisting enabled with {len(settings.ADMIN_IP_WHITELIST)} IP(s)/CIDR range(s)') if settings.RATE_LIMIT_ENABLED: - limiter = Limiter(key_func=get_remote_address, 
default_limits=[f'{settings.RATE_LIMIT_PER_MINUTE}/minute']) + # Use role-based rate limiting + from .security.middleware.role_based_rate_limit import create_role_based_limiter + limiter = create_role_based_limiter() app.state.limiter = limiter app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler) - logger.info(f'Rate limiting enabled: {settings.RATE_LIMIT_PER_MINUTE} requests/minute') + logger.info(f'Role-based rate limiting enabled: Admin={settings.RATE_LIMIT_ADMIN_PER_MINUTE}/min, Staff={settings.RATE_LIMIT_STAFF_PER_MINUTE}/min, Accountant={settings.RATE_LIMIT_ACCOUNTANT_PER_MINUTE}/min, Customer={settings.RATE_LIMIT_CUSTOMER_PER_MINUTE}/min, Default={settings.RATE_LIMIT_PER_MINUTE}/min') # CORS middleware must be added LAST to handle OPTIONS preflight requests before other middleware # In FastAPI/Starlette, middleware is executed in reverse order (last added = first executed = outermost) @@ -211,8 +216,11 @@ from .guest_management.routes.complaint_routes import router as complaint_routes from .notifications.routes import chat_routes, notification_routes, email_campaign_routes from .analytics.routes import analytics_routes, report_routes, audit_routes from .security.routes import security_routes, compliance_routes -from .system.routes import system_settings_routes, workflow_routes, task_routes +from .system.routes import system_settings_routes, workflow_routes, task_routes, approval_routes, backup_routes from .ai.routes import ai_assistant_routes +from .compliance.routes import gdpr_routes +from .integrations.routes import webhook_routes, api_key_routes +from .auth.routes import session_routes # Register all routes with /api prefix (removed duplicate registrations) # Using /api prefix as standard, API versioning can be handled via headers if needed @@ -264,6 +272,12 @@ app.include_router(email_campaign_routes.router, prefix=api_prefix) app.include_router(page_content_routes.router, prefix=api_prefix) app.include_router(blog_routes.router, prefix=api_prefix) app.include_router(ai_assistant_routes.router, prefix=api_prefix) +app.include_router(approval_routes.router, prefix=api_prefix) +app.include_router(gdpr_routes.router, prefix=api_prefix) +app.include_router(webhook_routes.router, prefix=api_prefix) +app.include_router(api_key_routes.router, prefix=api_prefix) +app.include_router(session_routes.router, prefix=api_prefix) +app.include_router(backup_routes.router, prefix=api_prefix) logger.info('All routes registered successfully') def ensure_jwt_secret(): diff --git a/Backend/src/payments/routes/__pycache__/invoice_routes.cpython-312.pyc b/Backend/src/payments/routes/__pycache__/invoice_routes.cpython-312.pyc index 138e3ae3..337f4185 100644 Binary files a/Backend/src/payments/routes/__pycache__/invoice_routes.cpython-312.pyc and b/Backend/src/payments/routes/__pycache__/invoice_routes.cpython-312.pyc differ diff --git a/Backend/src/payments/routes/invoice_routes.py b/Backend/src/payments/routes/invoice_routes.py index 23b9dc48..b73e4c29 100644 --- a/Backend/src/payments/routes/invoice_routes.py +++ b/Backend/src/payments/routes/invoice_routes.py @@ -98,7 +98,7 @@ async def create_invoice(request: Request, invoice_data: CreateInvoiceRequest, c raise HTTPException(status_code=500, detail=str(e)) @router.put('/{id}') -async def update_invoice(request: Request, id: int, invoice_data: UpdateInvoiceRequest, current_user: User=Depends(authorize_roles('admin', 'staff', 'accountant')), db: Session=Depends(get_db)): +async def update_invoice(request: Request, id: int, 
invoice_data: UpdateInvoiceRequest, current_user: User=Depends(authorize_roles('admin', 'accountant')), db: Session=Depends(get_db)): try: invoice = db.query(Invoice).filter(Invoice.id == id).first() if not invoice: @@ -119,7 +119,7 @@ async def update_invoice(request: Request, id: int, invoice_data: UpdateInvoiceR raise HTTPException(status_code=500, detail=str(e)) @router.post('/{id}/mark-paid') -async def mark_invoice_as_paid(request: Request, id: int, payment_data: MarkInvoicePaidRequest, current_user: User=Depends(authorize_roles('admin', 'staff', 'accountant')), db: Session=Depends(get_db)): +async def mark_invoice_as_paid(request: Request, id: int, payment_data: MarkInvoicePaidRequest, current_user: User=Depends(authorize_roles('admin', 'accountant')), db: Session=Depends(get_db)): try: request_id = get_request_id(request) amount = payment_data.amount diff --git a/Backend/src/security/middleware/__pycache__/auth.cpython-312.pyc b/Backend/src/security/middleware/__pycache__/auth.cpython-312.pyc index 02dce2d2..ed18b301 100644 Binary files a/Backend/src/security/middleware/__pycache__/auth.cpython-312.pyc and b/Backend/src/security/middleware/__pycache__/auth.cpython-312.pyc differ diff --git a/Backend/src/security/middleware/__pycache__/role_based_rate_limit.cpython-312.pyc b/Backend/src/security/middleware/__pycache__/role_based_rate_limit.cpython-312.pyc new file mode 100644 index 00000000..ca7b886e Binary files /dev/null and b/Backend/src/security/middleware/__pycache__/role_based_rate_limit.cpython-312.pyc differ diff --git a/Backend/src/security/middleware/auth.py b/Backend/src/security/middleware/auth.py index fd1249c6..f73bae08 100644 --- a/Backend/src/security/middleware/auth.py +++ b/Backend/src/security/middleware/auth.py @@ -3,6 +3,7 @@ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials from jose import JWTError, jwt from sqlalchemy.orm import Session from typing import Optional +from datetime import datetime import os from ...shared.config.database import get_db from ...shared.config.settings import settings @@ -15,26 +16,39 @@ def get_jwt_secret() -> str: Get JWT secret securely, fail if not configured. Never use hardcoded fallback secrets. """ - default_secret = 'dev-secret-key-change-in-production-12345' + # Remove default secret entirely - fail fast if not configured jwt_secret = getattr(settings, 'JWT_SECRET', None) or os.getenv('JWT_SECRET', None) - # Fail fast if secret is not configured or using default value - if not jwt_secret or jwt_secret == default_secret: - if settings.is_production: - raise ValueError( - 'CRITICAL: JWT_SECRET is not properly configured in production. ' - 'Please set JWT_SECRET environment variable to a secure random string.' - ) - # In development, warn but allow (startup validation should catch this) - import warnings - warnings.warn( - f'JWT_SECRET not configured. Using settings value but this is insecure. ' - f'Set JWT_SECRET environment variable.', - UserWarning + # Fail fast if secret is not configured + if not jwt_secret: + error_msg = ( + 'CRITICAL: JWT_SECRET is not configured. ' + 'Please set JWT_SECRET environment variable to a secure random string (minimum 32 characters).' 
) - jwt_secret = getattr(settings, 'JWT_SECRET', None) - if not jwt_secret: - raise ValueError('JWT_SECRET must be configured') + import logging + logger = logging.getLogger(__name__) + logger.error(error_msg) + if settings.is_production: + raise ValueError(error_msg) + else: + # In development, generate a secure secret but warn + import secrets + jwt_secret = secrets.token_urlsafe(64) + logger.warning( + f'JWT_SECRET not configured. Auto-generated secret for development. ' + f'Set JWT_SECRET environment variable for production: {jwt_secret}' + ) + + # Validate JWT secret strength + if len(jwt_secret) < 32: + error_msg = 'JWT_SECRET must be at least 32 characters long for security.' + import logging + logger = logging.getLogger(__name__) + logger.error(error_msg) + if settings.is_production: + raise ValueError(error_msg) + else: + logger.warning(error_msg) return jwt_secret @@ -80,6 +94,22 @@ def get_current_user( user = db.query(User).filter(User.id == user_id).first() if user is None: raise credentials_exception + + # Check if user account is active + if not user.is_active: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail='Account is disabled. Please contact support.' + ) + + # Check if account is locked + if user.locked_until and user.locked_until > datetime.utcnow(): + remaining_minutes = int((user.locked_until - datetime.utcnow()).total_seconds() / 60) + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f'Account is temporarily locked due to multiple failed login attempts. Please try again in {remaining_minutes} minute(s).' + ) + return user def authorize_roles(*allowed_roles: str): @@ -102,7 +132,8 @@ def get_current_user_optional( ) -> Optional[User]: """ Get current user optionally from either Authorization header or httpOnly cookie. - Returns None if no valid token is found. + Returns None if no valid token is found, or if user is inactive/locked. + This ensures inactive/locked users are never considered "authenticated" even for optional features. """ # Try to get token from Authorization header first token = None @@ -124,7 +155,19 @@ def get_current_user_optional( return None except (JWTError, ValueError): return None + user = db.query(User).filter(User.id == user_id).first() + if user is None: + return None + + # Check if user account is active - return None for inactive users + if not user.is_active: + return None + + # Check if account is locked - return None for locked users + if user.locked_until and user.locked_until > datetime.utcnow(): + return None + return user def verify_token(token: str) -> dict: diff --git a/Backend/src/security/middleware/role_based_rate_limit.py b/Backend/src/security/middleware/role_based_rate_limit.py new file mode 100644 index 00000000..60b64912 --- /dev/null +++ b/Backend/src/security/middleware/role_based_rate_limit.py @@ -0,0 +1,47 @@ +""" +Role-based rate limiting middleware. 
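With the hardcoded fallback removed, production now refuses to start unless `JWT_SECRET` is set and at least 32 characters long. A one-off snippet for minting a compliant value, mirroring the `secrets.token_urlsafe(64)` fallback used in development:

```python
# Minimal helper for generating a JWT secret that passes the new length check.
import secrets

secret = secrets.token_urlsafe(64)
assert len(secret) >= 32
print(secret)  # set this as the JWT_SECRET environment variable before deploying
```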
+""" +from slowapi import Limiter +from slowapi.util import get_remote_address +from typing import Callable +from fastapi import Request +from ...shared.config.settings import settings +from ...shared.config.logging_config import get_logger + +logger = get_logger(__name__) + +def get_rate_limit_key(request: Request) -> str: + """Get rate limit key based on user role if authenticated, otherwise IP.""" + # Try to get user from request state (set by auth middleware) + if hasattr(request.state, 'user') and request.state.user: + user = request.state.user + # Get user role + role = getattr(user, 'role', None) + if role: + role_name = getattr(role, 'name', None) + if role_name: + # Return role-based key + return f"rate_limit:{role_name}:{user.id}" + + # Fall back to IP-based limiting + return get_remote_address(request) + +def get_rate_limit_for_role(role_name: str) -> str: + """Get rate limit string for a specific role.""" + limits = { + 'admin': settings.RATE_LIMIT_ADMIN_PER_MINUTE, + 'staff': settings.RATE_LIMIT_STAFF_PER_MINUTE, + 'accountant': settings.RATE_LIMIT_ACCOUNTANT_PER_MINUTE, + 'customer': settings.RATE_LIMIT_CUSTOMER_PER_MINUTE, + } + + limit = limits.get(role_name, settings.RATE_LIMIT_PER_MINUTE) + return f"{limit}/minute" + +def create_role_based_limiter() -> Limiter: + """Create a role-based rate limiter.""" + return Limiter( + key_func=get_rate_limit_key, + default_limits=[f'{settings.RATE_LIMIT_PER_MINUTE}/minute'] + ) + diff --git a/Backend/src/shared/config/__pycache__/settings.cpython-312.pyc b/Backend/src/shared/config/__pycache__/settings.cpython-312.pyc index e8b0d865..d5a2d551 100644 Binary files a/Backend/src/shared/config/__pycache__/settings.cpython-312.pyc and b/Backend/src/shared/config/__pycache__/settings.cpython-312.pyc differ diff --git a/Backend/src/shared/config/settings.py b/Backend/src/shared/config/settings.py index b65a8db2..4f410899 100644 --- a/Backend/src/shared/config/settings.py +++ b/Backend/src/shared/config/settings.py @@ -28,6 +28,10 @@ class Settings(BaseSettings): CORS_ORIGINS: List[str] = Field(default_factory=lambda: ['http://localhost:5173', 'http://localhost:3000', 'http://127.0.0.1:5173'], description='Allowed CORS origins') RATE_LIMIT_ENABLED: bool = Field(default=True, description='Enable rate limiting') RATE_LIMIT_PER_MINUTE: int = Field(default=60, description='Requests per minute per IP') + RATE_LIMIT_ADMIN_PER_MINUTE: int = Field(default=300, description='Requests per minute for admin users') + RATE_LIMIT_STAFF_PER_MINUTE: int = Field(default=200, description='Requests per minute for staff users') + RATE_LIMIT_ACCOUNTANT_PER_MINUTE: int = Field(default=200, description='Requests per minute for accountant users') + RATE_LIMIT_CUSTOMER_PER_MINUTE: int = Field(default=100, description='Requests per minute for customer users') CSRF_PROTECTION_ENABLED: bool = Field(default=True, description='Enable CSRF protection') HSTS_PRELOAD_ENABLED: bool = Field(default=False, description='Enable HSTS preload directive (requires domain submission to hstspreload.org)') LOG_LEVEL: str = Field(default='INFO', description='Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL') diff --git a/Backend/src/shared/middleware/__pycache__/api_versioning.cpython-312.pyc b/Backend/src/shared/middleware/__pycache__/api_versioning.cpython-312.pyc new file mode 100644 index 00000000..aba68c1f Binary files /dev/null and b/Backend/src/shared/middleware/__pycache__/api_versioning.cpython-312.pyc differ diff --git a/Backend/src/shared/middleware/api_versioning.py 
b/Backend/src/shared/middleware/api_versioning.py new file mode 100644 index 00000000..5db735c4 --- /dev/null +++ b/Backend/src/shared/middleware/api_versioning.py @@ -0,0 +1,51 @@ +""" +API versioning middleware for backward compatibility. +""" +from fastapi import Request, HTTPException, status +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.responses import Response +from typing import Callable +import re +from ...shared.config.logging_config import get_logger + +logger = get_logger(__name__) + +class APIVersioningMiddleware(BaseHTTPMiddleware): + """Middleware to handle API versioning.""" + + def __init__(self, app, default_version: str = "v1"): + super().__init__(app) + self.default_version = default_version + self.version_pattern = re.compile(r'/api/v(\d+)/') + + async def dispatch(self, request: Request, call_next: Callable) -> Response: + """Process request and handle versioning.""" + path = request.url.path + + # Check if path starts with /api + if path.startswith('/api'): + # Extract version from path + version_match = self.version_pattern.search(path) + + if version_match: + version = version_match.group(1) + # Store version in request state + request.state.api_version = f"v{version}" + # Remove version from path for routing + new_path = self.version_pattern.sub('/api/', path) + request.scope['path'] = new_path + elif path.startswith('/api/') and not path.startswith('/api/v'): + # No version specified, use default + request.state.api_version = self.default_version + else: + # Health check or other non-versioned endpoints + request.state.api_version = None + + response = await call_next(request) + + # Add version header to response + if hasattr(request.state, 'api_version') and request.state.api_version: + response.headers['X-API-Version'] = request.state.api_version + + return response + diff --git a/Backend/src/shared/utils/audit_decorator.py b/Backend/src/shared/utils/audit_decorator.py new file mode 100644 index 00000000..707c995f --- /dev/null +++ b/Backend/src/shared/utils/audit_decorator.py @@ -0,0 +1,111 @@ +""" +Decorator for automatic audit logging of financial operations. +""" +from functools import wraps +from typing import Callable, Any +from fastapi import Request +from sqlalchemy.orm import Session +from ...analytics.services.audit_service import audit_service +from ...shared.config.logging_config import get_logger + +logger = get_logger(__name__) + +def audit_financial_operation(action: str, resource_type: str): + """ + Decorator to automatically log financial operations to audit trail. + + Usage: + @audit_financial_operation('payment_refunded', 'payment') + async def refund_payment(...): + ... 
+ """ + def decorator(func: Callable) -> Callable: + @wraps(func) + async def wrapper(*args, **kwargs): + # Extract request and db from function arguments + request: Request = None + db: Session = None + current_user = None + + # Find request and db in kwargs or args + for arg in list(args) + list(kwargs.values()): + if isinstance(arg, Request): + request = arg + elif isinstance(arg, Session): + db = arg + elif hasattr(arg, 'id') and hasattr(arg, 'email'): # Likely a User object + current_user = arg + + # Get request from kwargs if not found + if not request and 'request' in kwargs: + request = kwargs['request'] + if not db and 'db' in kwargs: + db = kwargs['db'] + if not current_user and 'current_user' in kwargs: + current_user = kwargs['current_user'] + + # Extract resource_id from function arguments if available + resource_id = None + if 'id' in kwargs: + resource_id = kwargs['id'] + elif len(args) > 0 and isinstance(args[0], int): + resource_id = args[0] + + # Get client info + client_ip = None + user_agent = None + request_id = None + if request: + client_ip = request.client.host if request.client else None + user_agent = request.headers.get('User-Agent') + request_id = getattr(request.state, 'request_id', None) + + try: + # Execute the function + result = await func(*args, **kwargs) + + # Log successful operation + if db and current_user: + await audit_service.log_action( + db=db, + action=action, + resource_type=resource_type, + user_id=current_user.id if current_user else None, + resource_id=resource_id, + ip_address=client_ip, + user_agent=user_agent, + request_id=request_id, + details={ + 'function': func.__name__, + 'result': 'success' + }, + status='success' + ) + + return result + except Exception as e: + # Log failed operation + if db and current_user: + await audit_service.log_action( + db=db, + action=action, + resource_type=resource_type, + user_id=current_user.id if current_user else None, + resource_id=resource_id, + ip_address=client_ip, + user_agent=user_agent, + request_id=request_id, + details={ + 'function': func.__name__, + 'result': 'failed', + 'error': str(e) + }, + status='failed', + error_message=str(e) + ) + + raise + + return wrapper + return decorator + diff --git a/Backend/src/system/models/__pycache__/approval_workflow.cpython-312.pyc b/Backend/src/system/models/__pycache__/approval_workflow.cpython-312.pyc new file mode 100644 index 00000000..b21fca35 Binary files /dev/null and b/Backend/src/system/models/__pycache__/approval_workflow.cpython-312.pyc differ diff --git a/Backend/src/system/models/approval_workflow.py b/Backend/src/system/models/approval_workflow.py new file mode 100644 index 00000000..1e4069f2 --- /dev/null +++ b/Backend/src/system/models/approval_workflow.py @@ -0,0 +1,59 @@ +""" +Approval workflow models for enterprise financial operations. 
+""" +from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Enum, JSON +from sqlalchemy.orm import relationship +from datetime import datetime +import enum +from ...shared.config.database import Base + +class ApprovalStatus(str, enum.Enum): + pending = "pending" + approved = "approved" + rejected = "rejected" + cancelled = "cancelled" + +class ApprovalType(str, enum.Enum): + invoice_update = "invoice_update" + payment_refund = "payment_refund" + invoice_mark_paid = "invoice_mark_paid" + financial_adjustment = "financial_adjustment" + user_role_change = "user_role_change" + large_transaction = "large_transaction" + +class ApprovalRequest(Base): + __tablename__ = 'approval_requests' + + id = Column(Integer, primary_key=True, index=True, autoincrement=True) + approval_type = Column(Enum(ApprovalType), nullable=False, index=True) + status = Column(Enum(ApprovalStatus), default=ApprovalStatus.pending, nullable=False, index=True) + + # Who requested + requested_by = Column(Integer, ForeignKey('users.id'), nullable=False, index=True) + requested_at = Column(DateTime, default=datetime.utcnow, nullable=False) + + # Who approved/rejected + approved_by = Column(Integer, ForeignKey('users.id'), nullable=True, index=True) + approved_at = Column(DateTime, nullable=True) + rejection_reason = Column(Text, nullable=True) + + # Resource being approved + resource_type = Column(String(50), nullable=False, index=True) + resource_id = Column(Integer, nullable=False, index=True) + + # Request details + request_data = Column(JSON, nullable=True) + current_data = Column(JSON, nullable=True) # Snapshot of current state + + # Metadata + priority = Column(String(20), default='normal') # low, normal, high, urgent + notes = Column(Text, nullable=True) + extra_metadata = Column(JSON, nullable=True) # Renamed from 'metadata' as it's reserved in SQLAlchemy + + # Relationships + requester = relationship('User', foreign_keys=[requested_by]) + approver = relationship('User', foreign_keys=[approved_by]) + + created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True) + updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False) + diff --git a/Backend/src/system/routes/__pycache__/approval_routes.cpython-312.pyc b/Backend/src/system/routes/__pycache__/approval_routes.cpython-312.pyc new file mode 100644 index 00000000..ab54e6aa Binary files /dev/null and b/Backend/src/system/routes/__pycache__/approval_routes.cpython-312.pyc differ diff --git a/Backend/src/system/routes/__pycache__/backup_routes.cpython-312.pyc b/Backend/src/system/routes/__pycache__/backup_routes.cpython-312.pyc new file mode 100644 index 00000000..681228f0 Binary files /dev/null and b/Backend/src/system/routes/__pycache__/backup_routes.cpython-312.pyc differ diff --git a/Backend/src/system/routes/approval_routes.py b/Backend/src/system/routes/approval_routes.py new file mode 100644 index 00000000..b092a4d1 --- /dev/null +++ b/Backend/src/system/routes/approval_routes.py @@ -0,0 +1,200 @@ +""" +Approval workflow routes. 
+""" +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlalchemy.orm import Session +from typing import Optional +from ...shared.config.database import get_db +from ...shared.config.logging_config import get_logger +from ...security.middleware.auth import get_current_user, authorize_roles +from ...auth.models.user import User +from ...system.services.approval_service import approval_service +from ...system.models.approval_workflow import ApprovalRequest, ApprovalStatus, ApprovalType +from ...shared.utils.response_helpers import success_response + +logger = get_logger(__name__) +router = APIRouter(prefix='/approvals', tags=['approvals']) + +@router.get('/pending') +async def get_pending_approvals( + approval_type: Optional[str] = Query(None), + page: int = Query(1, ge=1), + limit: int = Query(50, ge=1, le=100), + current_user: User = Depends(authorize_roles('admin', 'accountant')), + db: Session = Depends(get_db) +): + """Get pending approval requests.""" + try: + approval_type_enum = None + if approval_type: + try: + approval_type_enum = ApprovalType(approval_type) + except ValueError: + raise HTTPException(status_code=400, detail=f'Invalid approval_type: {approval_type}') + + offset = (page - 1) * limit + approvals = approval_service.get_pending_approvals( + db=db, + approval_type=approval_type_enum, + limit=limit, + offset=offset + ) + + return success_response(data={ + 'approvals': [{ + 'id': app.id, + 'approval_type': app.approval_type.value, + 'status': app.status.value, + 'resource_type': app.resource_type, + 'resource_id': app.resource_id, + 'requested_by': app.requested_by, + 'requested_at': app.requested_at.isoformat() if app.requested_at else None, + 'priority': app.priority, + 'notes': app.notes, + 'request_data': app.request_data, + 'current_data': app.current_data + } for app in approvals] + }) + except HTTPException: + raise + except Exception as e: + logger.error(f'Error getting pending approvals: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/{request_id}/approve') +async def approve_request( + request_id: int, + notes: Optional[str] = None, + current_user: User = Depends(authorize_roles('admin', 'accountant')), + db: Session = Depends(get_db) +): + """Approve an approval request.""" + try: + approval = await approval_service.approve_request( + db=db, + request_id=request_id, + approved_by=current_user.id, + notes=notes + ) + + return success_response( + data={'approval': { + 'id': approval.id, + 'status': approval.status.value, + 'approved_by': approval.approved_by, + 'approved_at': approval.approved_at.isoformat() if approval.approved_at else None + }}, + message='Approval request approved successfully' + ) + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f'Error approving request: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.post('/{request_id}/reject') +async def reject_request( + request_id: int, + rejection_reason: str, + current_user: User = Depends(authorize_roles('admin', 'accountant')), + db: Session = Depends(get_db) +): + """Reject an approval request.""" + try: + approval = await approval_service.reject_request( + db=db, + request_id=request_id, + rejected_by=current_user.id, + rejection_reason=rejection_reason + ) + + return success_response( + data={'approval': { + 'id': approval.id, + 'status': approval.status.value, + 'rejection_reason': approval.rejection_reason + }}, + 
message='Approval request rejected' + ) + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f'Error rejecting request: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.get('/my-requests') +async def get_my_approval_requests( + status: Optional[str] = Query(None), + page: int = Query(1, ge=1), + limit: int = Query(50, ge=1, le=100), + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Get current user's approval requests.""" + try: + status_enum = None + if status: + try: + status_enum = ApprovalStatus(status) + except ValueError: + raise HTTPException(status_code=400, detail=f'Invalid status: {status}') + + offset = (page - 1) * limit + approvals = approval_service.get_user_approvals( + db=db, + user_id=current_user.id, + status=status_enum, + limit=limit, + offset=offset + ) + + return success_response(data={ + 'approvals': [{ + 'id': app.id, + 'approval_type': app.approval_type.value, + 'status': app.status.value, + 'resource_type': app.resource_type, + 'resource_id': app.resource_id, + 'requested_at': app.requested_at.isoformat() if app.requested_at else None, + 'approved_at': app.approved_at.isoformat() if app.approved_at else None, + 'rejection_reason': app.rejection_reason + } for app in approvals] + }) + except HTTPException: + raise + except Exception as e: + logger.error(f'Error getting user approvals: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + +@router.delete('/{request_id}') +async def cancel_approval_request( + request_id: int, + current_user: User = Depends(get_current_user), + db: Session = Depends(get_db) +): + """Cancel an approval request (only by requester or admin).""" + try: + approval = db.query(ApprovalRequest).filter(ApprovalRequest.id == request_id).first() + + if not approval: + raise HTTPException(status_code=404, detail='Approval request not found') + + # Only requester or admin can cancel + if approval.requested_by != current_user.id and current_user.role.name != 'admin': + raise HTTPException(status_code=403, detail='Not authorized to cancel this request') + + # Only pending requests can be cancelled + if approval.status != ApprovalStatus.pending: + raise HTTPException(status_code=400, detail='Only pending requests can be cancelled') + + approval.status = ApprovalStatus.cancelled + db.commit() + + logger.info(f'Approval request {request_id} cancelled by user {current_user.id}') + return success_response(message='Approval request cancelled successfully') + except HTTPException: + raise + except Exception as e: + logger.error(f'Error cancelling approval request: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + diff --git a/Backend/src/system/services/__pycache__/approval_service.cpython-312.pyc b/Backend/src/system/services/__pycache__/approval_service.cpython-312.pyc new file mode 100644 index 00000000..73dafca4 Binary files /dev/null and b/Backend/src/system/services/__pycache__/approval_service.cpython-312.pyc differ diff --git a/Backend/src/system/services/__pycache__/backup_service.cpython-312.pyc b/Backend/src/system/services/__pycache__/backup_service.cpython-312.pyc new file mode 100644 index 00000000..de4faaf9 Binary files /dev/null and b/Backend/src/system/services/__pycache__/backup_service.cpython-312.pyc differ diff --git a/Backend/src/system/services/approval_service.py b/Backend/src/system/services/approval_service.py new file mode 
100644 index 00000000..440117c2 --- /dev/null +++ b/Backend/src/system/services/approval_service.py @@ -0,0 +1,231 @@ +""" +Approval workflow service for enterprise financial operations. +""" +from sqlalchemy.orm import Session +from typing import Optional, Dict, Any, List +from datetime import datetime +from ..models.approval_workflow import ApprovalRequest, ApprovalStatus, ApprovalType +from ...auth.models.user import User +from ...shared.config.logging_config import get_logger +from ...analytics.services.audit_service import audit_service + +logger = get_logger(__name__) + +class ApprovalService: + """Service for managing approval workflows.""" + + # Thresholds for automatic approval requirements + REFUND_THRESHOLD = 1000.0 # Require approval for refunds > $1000 + INVOICE_UPDATE_THRESHOLD = 500.0 # Require approval for invoice updates > $500 + LARGE_TRANSACTION_THRESHOLD = 5000.0 # Require approval for transactions > $5000 + + @staticmethod + def requires_approval( + approval_type: ApprovalType, + amount: Optional[float] = None, + user_role: Optional[str] = None + ) -> bool: + """Check if an action requires approval.""" + # Admin and accountant can auto-approve their own actions + if user_role in ['admin', 'accountant']: + return False + + # Staff always requires approval for financial operations + if user_role == 'staff': + if approval_type in [ + ApprovalType.invoice_update, + ApprovalType.payment_refund, + ApprovalType.invoice_mark_paid, + ApprovalType.financial_adjustment + ]: + return True + + # Check amount thresholds + if amount: + if approval_type == ApprovalType.payment_refund and amount > ApprovalService.REFUND_THRESHOLD: + return True + if approval_type == ApprovalType.invoice_update and amount > ApprovalService.INVOICE_UPDATE_THRESHOLD: + return True + if approval_type == ApprovalType.large_transaction and amount > ApprovalService.LARGE_TRANSACTION_THRESHOLD: + return True + + return False + + @staticmethod + async def create_approval_request( + db: Session, + approval_type: ApprovalType, + resource_type: str, + resource_id: int, + requested_by: int, + request_data: Dict[str, Any], + current_data: Optional[Dict[str, Any]] = None, + priority: str = 'normal', + notes: Optional[str] = None, + metadata: Optional[Dict[str, Any]] = None + ) -> ApprovalRequest: + """Create a new approval request.""" + approval_request = ApprovalRequest( + approval_type=approval_type, + status=ApprovalStatus.pending, + requested_by=requested_by, + resource_type=resource_type, + resource_id=resource_id, + request_data=request_data, + current_data=current_data, + priority=priority, + notes=notes, + extra_metadata=metadata + ) + + db.add(approval_request) + db.commit() + db.refresh(approval_request) + + # Log approval request creation + await audit_service.log_action( + db=db, + action='approval_request_created', + resource_type='approval_request', + user_id=requested_by, + resource_id=approval_request.id, + details={ + 'approval_type': approval_type.value, + 'resource_type': resource_type, + 'resource_id': resource_id, + 'priority': priority + }, + status='success' + ) + + logger.info(f'Approval request created: {approval_request.id} for {approval_type.value}') + return approval_request + + @staticmethod + async def approve_request( + db: Session, + request_id: int, + approved_by: int, + notes: Optional[str] = None + ) -> ApprovalRequest: + """Approve an approval request.""" + approval_request = db.query(ApprovalRequest).filter( + ApprovalRequest.id == request_id + ).first() + + if not 
approval_request: + raise ValueError('Approval request not found') + + if approval_request.status != ApprovalStatus.pending: + raise ValueError(f'Approval request is already {approval_request.status.value}') + + approval_request.status = ApprovalStatus.approved + approval_request.approved_by = approved_by + approval_request.approved_at = datetime.utcnow() + if notes: + approval_request.notes = notes + + db.commit() + db.refresh(approval_request) + + # Log approval + await audit_service.log_action( + db=db, + action='approval_request_approved', + resource_type='approval_request', + user_id=approved_by, + resource_id=request_id, + details={ + 'approval_type': approval_request.approval_type.value, + 'resource_type': approval_request.resource_type, + 'resource_id': approval_request.resource_id + }, + status='success' + ) + + logger.info(f'Approval request {request_id} approved by user {approved_by}') + return approval_request + + @staticmethod + async def reject_request( + db: Session, + request_id: int, + rejected_by: int, + rejection_reason: str + ) -> ApprovalRequest: + """Reject an approval request.""" + approval_request = db.query(ApprovalRequest).filter( + ApprovalRequest.id == request_id + ).first() + + if not approval_request: + raise ValueError('Approval request not found') + + if approval_request.status != ApprovalStatus.pending: + raise ValueError(f'Approval request is already {approval_request.status.value}') + + approval_request.status = ApprovalStatus.rejected + approval_request.approved_by = rejected_by + approval_request.approved_at = datetime.utcnow() + approval_request.rejection_reason = rejection_reason + + db.commit() + db.refresh(approval_request) + + # Log rejection + await audit_service.log_action( + db=db, + action='approval_request_rejected', + resource_type='approval_request', + user_id=rejected_by, + resource_id=request_id, + details={ + 'approval_type': approval_request.approval_type.value, + 'rejection_reason': rejection_reason + }, + status='success' + ) + + logger.info(f'Approval request {request_id} rejected by user {rejected_by}') + return approval_request + + @staticmethod + def get_pending_approvals( + db: Session, + approval_type: Optional[ApprovalType] = None, + limit: int = 50, + offset: int = 0 + ) -> List[ApprovalRequest]: + """Get pending approval requests.""" + query = db.query(ApprovalRequest).filter( + ApprovalRequest.status == ApprovalStatus.pending + ) + + if approval_type: + query = query.filter(ApprovalRequest.approval_type == approval_type) + + return query.order_by( + ApprovalRequest.priority.desc(), + ApprovalRequest.requested_at.asc() + ).offset(offset).limit(limit).all() + + @staticmethod + def get_user_approvals( + db: Session, + user_id: int, + status: Optional[ApprovalStatus] = None, + limit: int = 50, + offset: int = 0 + ) -> List[ApprovalRequest]: + """Get approval requests for a user.""" + query = db.query(ApprovalRequest).filter( + ApprovalRequest.requested_by == user_id + ) + + if status: + query = query.filter(ApprovalRequest.status == status) + + return query.order_by(ApprovalRequest.created_at.desc()).offset(offset).limit(limit).all() + +approval_service = ApprovalService() + diff --git a/Backend/src/system/services/backup_service.py b/Backend/src/system/services/backup_service.py new file mode 100644 index 00000000..5afba402 --- /dev/null +++ b/Backend/src/system/services/backup_service.py @@ -0,0 +1,240 @@ +""" +Data backup service for enterprise data protection. 
+""" +from sqlalchemy.orm import Session +from typing import Optional, Dict, Any +from datetime import datetime, timedelta +from pathlib import Path +import json +import subprocess +import os +import shutil +from ...shared.config.settings import settings +from ...shared.config.logging_config import get_logger + +logger = get_logger(__name__) + +def is_running_in_docker() -> bool: + """Check if the application is running inside a Docker container.""" + # Check for .dockerenv file (most reliable method) + if os.path.exists('/.dockerenv'): + return True + # Check for Docker in cgroup + try: + with open('/proc/self/cgroup', 'r') as f: + if 'docker' in f.read(): + return True + except (FileNotFoundError, IOError): + pass + # Check environment variable + if os.environ.get('DOCKER_CONTAINER') == 'true': + return True + return False + +class BackupService: + """Service for managing database backups.""" + + BACKUP_DIR = Path('backups') + RETENTION_DAYS = 30 # Keep backups for 30 days + + @staticmethod + def ensure_backup_dir(): + """Ensure backup directory exists.""" + BackupService.BACKUP_DIR.mkdir(parents=True, exist_ok=True) + + @staticmethod + def check_mysqldump_available() -> bool: + """Check if mysqldump is available on the system.""" + return shutil.which('mysqldump') is not None + + @staticmethod + def create_backup(db_name: str, db_user: str, db_password: str, db_host: str, db_port: str) -> Dict[str, Any]: + """ + Create a database backup. + Returns backup metadata. + """ + BackupService.ensure_backup_dir() + + # Check if mysqldump is available + if not BackupService.check_mysqldump_available(): + error_msg = ( + "mysqldump is not installed or not in PATH. " + "Please install MySQL client tools: " + "Ubuntu/Debian: sudo apt-get install mysql-client " + "or CentOS/RHEL: sudo yum install mysql" + ) + logger.error(error_msg) + raise FileNotFoundError(error_msg) + + timestamp = datetime.utcnow().strftime('%Y%m%d_%H%M%S') + backup_filename = f'backup_{db_name}_{timestamp}.sql' + backup_path = BackupService.BACKUP_DIR / backup_filename + + try: + # Detect Docker environment + in_docker = is_running_in_docker() + + # Determine the correct host to use + # In Docker, if host is localhost, try to use the MySQL service name or host.docker.internal + actual_host = db_host + if in_docker: + if db_host in ('localhost', '127.0.0.1'): + # Try to use MySQL service name from docker-compose (common names) + mysql_service_name = os.environ.get('MYSQL_SERVICE_NAME', 'mysql') + actual_host = mysql_service_name + logger.info(f"Running in Docker, using MySQL service name: {actual_host}") + else: + logger.info(f"Running in Docker, using configured host: {actual_host}") + else: + logger.info(f"Running on host, using configured host: {actual_host}") + + # Use mysqldump for MySQL/MariaDB + # Note: In production, use proper credentials management + env = os.environ.copy() + if db_password: + env['MYSQL_PWD'] = db_password + + # Get full path to mysqldump + mysqldump_path = shutil.which('mysqldump') + if not mysqldump_path: + raise FileNotFoundError("mysqldump not found in PATH") + + # Build mysqldump command + # Always use TCP protocol to avoid socket issues, especially in Docker + cmd = [ + mysqldump_path, + '--protocol=TCP', # Force TCP/IP connection + f'--user={db_user}', + f'--host={actual_host}', + f'--port={db_port}', + '--single-transaction', + '--routines', + '--triggers', + '--skip-lock-tables', # Useful in Docker environments + db_name + ] + + logger.info(f"Executing mysqldump: {' '.join(cmd[:3])}... 
[password hidden]") + + with open(backup_path, 'w') as f: + result = subprocess.run( + cmd, + stdout=f, + stderr=subprocess.PIPE, + env=env, + text=True, + timeout=300 # 5 minute timeout + ) + + if result.returncode != 0: + error_msg = result.stderr or 'Unknown error' + # Provide more helpful error messages + if 'Can\'t connect' in error_msg or '2002' in error_msg or '2003' in error_msg: + if in_docker: + enhanced_error = ( + f"Connection failed. Running in Docker environment.\n" + f"Attempted to connect to: {actual_host}:{db_port}\n" + f"Original error: {error_msg}\n" + f"Tip: Ensure MySQL service is accessible from this container. " + f"If using docker-compose, the service name should be 'mysql' or set MYSQL_SERVICE_NAME env var." + ) + else: + enhanced_error = ( + f"Connection failed to MySQL server.\n" + f"Attempted to connect to: {actual_host}:{db_port}\n" + f"Original error: {error_msg}\n" + f"Tip: Ensure MySQL server is running and accessible at {actual_host}:{db_port}" + ) + raise Exception(f'Backup failed: {enhanced_error}') + raise Exception(f'Backup failed: {error_msg}') + + # Get backup size + backup_size = backup_path.stat().st_size + + # Create metadata + metadata = { + 'filename': backup_filename, + 'path': str(backup_path), + 'size_bytes': backup_size, + 'size_mb': round(backup_size / (1024 * 1024), 2), + 'created_at': datetime.utcnow().isoformat(), + 'database': db_name, + 'status': 'success' + } + + # Save metadata + metadata_path = backup_path.with_suffix('.json') + with open(metadata_path, 'w') as f: + json.dump(metadata, f, indent=2) + + logger.info(f'Backup created: {backup_filename} ({metadata["size_mb"]} MB)') + return metadata + + except Exception as e: + logger.error(f'Backup failed: {str(e)}', exc_info=True) + raise + + @staticmethod + def list_backups() -> list[Dict[str, Any]]: + """List all available backups.""" + BackupService.ensure_backup_dir() + + backups = [] + for backup_file in BackupService.BACKUP_DIR.glob('backup_*.sql'): + metadata_file = backup_file.with_suffix('.json') + if metadata_file.exists(): + try: + with open(metadata_file, 'r') as f: + metadata = json.load(f) + backups.append(metadata) + except Exception as e: + logger.warning(f'Error reading backup metadata {metadata_file}: {str(e)}') + + # Sort by creation date (newest first) + backups.sort(key=lambda x: x.get('created_at', ''), reverse=True) + return backups + + @staticmethod + def cleanup_old_backups() -> int: + """Remove backups older than retention period.""" + BackupService.ensure_backup_dir() + + cutoff_date = datetime.utcnow() - timedelta(days=BackupService.RETENTION_DAYS) + removed_count = 0 + + for backup_file in BackupService.BACKUP_DIR.glob('backup_*.sql'): + metadata_file = backup_file.with_suffix('.json') + if metadata_file.exists(): + try: + with open(metadata_file, 'r') as f: + metadata = json.load(f) + created_at = datetime.fromisoformat(metadata.get('created_at', '')) + + if created_at < cutoff_date: + backup_file.unlink() + metadata_file.unlink() + removed_count += 1 + logger.info(f'Removed old backup: {backup_file.name}') + except Exception as e: + logger.warning(f'Error processing backup {backup_file}: {str(e)}') + + return removed_count + + @staticmethod + def get_backup_info(backup_filename: str) -> Optional[Dict[str, Any]]: + """Get information about a specific backup.""" + backup_path = BackupService.BACKUP_DIR / backup_filename + metadata_path = backup_path.with_suffix('.json') + + if not metadata_path.exists(): + return None + + try: + with open(metadata_path, 
'r') as f: + return json.load(f) + except Exception as e: + logger.error(f'Error reading backup info: {str(e)}') + return None + +backup_service = BackupService() +
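For reference, a minimal sketch of how the new BackupService could be driven from a one-off maintenance script. The environment-variable names and default connection values below are illustrative assumptions (the real wiring presumably lives in backup_routes.py, whose body is not shown in this hunk), and the import path is inferred from the Backend/src layout in the diff.

# Sketch only: assumed import path and env-var names; not taken from the diff.
import os

from src.system.services.backup_service import backup_service  # path assumed from Backend/src layout

def run_nightly_backup() -> None:
    # create_backup() shells out to mysqldump and writes backup_<db>_<timestamp>.sql plus a .json sidecar
    metadata = backup_service.create_backup(
        db_name=os.environ.get("DB_NAME", "hotel"),        # assumed variable names
        db_user=os.environ.get("DB_USER", "root"),
        db_password=os.environ.get("DB_PASSWORD", ""),
        db_host=os.environ.get("DB_HOST", "localhost"),
        db_port=os.environ.get("DB_PORT", "3306"),
    )
    print(f"created {metadata['filename']} ({metadata['size_mb']} MB)")

    # prune anything older than BackupService.RETENTION_DAYS (30 days)
    removed = backup_service.cleanup_old_backups()
    print(f"removed {removed} expired backup(s)")

if __name__ == "__main__":
    run_nightly_backup()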
a/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc new file mode 100644 index 00000000..4e4040d3 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc new file mode 100644 index 00000000..d58ccdf8 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc new file mode 100644 index 00000000..12f75f8f Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc new file mode 100644 index 00000000..11eadd37 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc new file mode 100644 index 00000000..eae8d3aa Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc new file mode 100644 index 00000000..8e22fc2c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc new file mode 100644 index 00000000..c1316fe8 Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc new file mode 100644 index 00000000..4a14ae8c Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc differ diff --git a/Backend/venv/lib/python3.12/site-packages/sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc 
diff --git a/Frontend/src/App.tsx b/Frontend/src/App.tsx
index b2b29c1f..b1ad70b0 100644
--- a/Frontend/src/App.tsx
+++ b/Frontend/src/App.tsx
@@ -60,6 +60,7 @@ const ProfilePage = lazy(() => import('./pages/customer/ProfilePage'));
 const LoyaltyPage = lazy(() => import('./pages/customer/LoyaltyPage'));
 const GroupBookingPage = lazy(() => import('./pages/customer/GroupBookingPage'));
 const ComplaintPage = lazy(() => import('./pages/customer/ComplaintPage'));
+const GDPRPage = lazy(() => import('./pages/customer/GDPRPage'));
 const AboutPage = lazy(() => import('./features/content/pages/AboutPage'));
 const ContactPage = lazy(() => import('./features/content/pages/ContactPage'));
 const PrivacyPolicyPage = lazy(() => import('./features/content/pages/PrivacyPolicyPage'));
@@ -97,6 +98,11 @@ const BlogManagementPage = lazy(() => import('./pages/admin/BlogManagementPage')
 const ComplaintManagementPage = lazy(() => import('./pages/admin/ComplaintManagementPage'));
 const FinancialAuditTrailPage = lazy(() => import('./pages/admin/FinancialAuditTrailPage'));
 const ComplianceReportingPage = lazy(() => import('./pages/admin/ComplianceReportingPage'));
+const ApprovalManagementPage = lazy(() => import('./pages/admin/ApprovalManagementPage'));
+const GDPRManagementPage = lazy(() => import('./pages/admin/GDPRManagementPage'));
+const WebhookManagementPage = lazy(() => import('./pages/admin/WebhookManagementPage'));
+const APIKeyManagementPage = lazy(() => import('./pages/admin/APIKeyManagementPage'));
+const BackupManagementPage = lazy(() => import('./pages/admin/BackupManagementPage'));
 
 const StaffDashboardPage = lazy(() => import('./pages/staff/DashboardPage'));
 const StaffBookingManagementPage = lazy(() => import('./pages/staff/BookingManagementPage'));
@@ -413,6 +419,14 @@ function App() {
 [8 added lines registering a route for the new customer GDPR page; the JSX markup was lost in extraction, only closing fragments (`} />`) survive]
@@ -545,6 +559,26 @@ function App() {
 [20 added lines, next to the existing path="compliance" route, registering routes for the five new admin pages (approvals, GDPR, webhooks, API keys, backups); the JSX markup was lost in extraction]
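Because the route markup in the two hunks above did not survive, here is a minimal sketch of the lazy-route pattern App.tsx uses for pages such as GDPRPage. The path string, guard placement, and fallback below are assumptions for illustration, not lines from the change.

// Illustrative only: lazy page + route registration in the App.tsx style.
import React, { Suspense, lazy } from 'react';
import { Routes, Route } from 'react-router-dom';

const GDPRPage = lazy(() => import('./pages/customer/GDPRPage'));

export const GdprRouteSketch: React.FC = () => (
  <Suspense fallback={<div>Loading…</div>}>
    <Routes>
      {/* Hypothetical path; in the real file this sits among the customer routes. */}
      <Route path="/gdpr" element={<GDPRPage />} />
    </Routes>
  </Suspense>
);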
diff --git a/Frontend/src/features/auth/components/CustomerRoute.tsx b/Frontend/src/features/auth/components/CustomerRoute.tsx
index d6dde499..158d34e3 100644
--- a/Frontend/src/features/auth/components/CustomerRoute.tsx
+++ b/Frontend/src/features/auth/components/CustomerRoute.tsx
@@ -45,7 +45,8 @@ const CustomerRoute: React.FC = ({
   }
 
-  const isCustomer = userInfo?.role !== 'admin' && userInfo?.role !== 'staff' && userInfo?.role !== 'accountant';
+  // Explicitly check for customer role to prevent unexpected roles from accessing customer routes
+  const isCustomer = userInfo?.role === 'customer';
   if (!isCustomer) {
     if (userInfo?.role === 'admin') {
       return ;
diff --git a/Frontend/src/features/auth/services/sessionService.ts b/Frontend/src/features/auth/services/sessionService.ts
new file mode 100644
index 00000000..1c9cf399
--- /dev/null
+++ b/Frontend/src/features/auth/services/sessionService.ts
@@ -0,0 +1,31 @@
+import apiClient from '../../../shared/services/apiClient';
+
+export interface UserSession {
+  id: number;
+  ip_address?: string;
+  user_agent?: string;
+  device_info?: string;
+  last_activity: string;
+  created_at: string;
+  expires_at: string;
+}
+
+class SessionService {
+  async getMySessions() {
+    const response = await apiClient.get('/sessions');
+    return response.data;
+  }
+
+  async revokeSession(sessionId: number) {
+    const response = await apiClient.delete(`/sessions/${sessionId}`);
+    return response.data;
+  }
+
+  async revokeAllSessions() {
+    const response = await apiClient.post('/sessions/revoke-all');
+    return response.data;
+  }
+}
+
+export default new SessionService();
+
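For context, a small usage sketch for sessionService. The response shape (a sessions array in the payload) is an assumption about the backend, not something the diff specifies.

// Illustrative consumer of sessionService: list active sessions and revoke all but the current one.
import sessionService, { UserSession } from './sessionService';

async function revokeOtherSessions(currentSessionId: number): Promise<void> {
  const payload = await sessionService.getMySessions();
  // Assumed payload shape; adjust to the real API response.
  const sessions: UserSession[] = payload?.data?.sessions ?? payload?.sessions ?? [];
  for (const session of sessions) {
    if (session.id !== currentSessionId) {
      await sessionService.revokeSession(session.id); // DELETE /sessions/:id
    }
  }
}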
diff --git a/Frontend/src/features/compliance/services/gdprService.ts b/Frontend/src/features/compliance/services/gdprService.ts
new file mode 100644
index 00000000..d59e4c61
--- /dev/null
+++ b/Frontend/src/features/compliance/services/gdprService.ts
@@ -0,0 +1,65 @@
+import apiClient from '../../../shared/services/apiClient';
+
+export interface GDPRRequest {
+  id: number;
+  request_type: 'data_export' | 'data_deletion' | 'data_rectification' | 'consent_withdrawal';
+  status: 'pending' | 'processing' | 'completed' | 'rejected' | 'cancelled';
+  user_id: number;
+  user_email: string;
+  created_at: string;
+  processed_at?: string;
+  verification_token?: string;
+  expires_at?: string;
+}
+
+class GDPRService {
+  async requestDataExport() {
+    const response = await apiClient.post('/gdpr/export');
+    return response.data;
+  }
+
+  async getExportData(requestId: number, verificationToken: string) {
+    const response = await apiClient.get(`/gdpr/export/${requestId}`, {
+      params: { verification_token: verificationToken },
+    });
+    return response.data;
+  }
+
+  async downloadExport(requestId: number, verificationToken: string) {
+    const response = await apiClient.get(`/gdpr/export/${requestId}`, {
+      params: { verification_token: verificationToken },
+      responseType: 'blob',
+    });
+    return response.data;
+  }
+
+  async requestDataDeletion() {
+    const response = await apiClient.post('/gdpr/delete');
+    return response.data;
+  }
+
+  async confirmDataDeletion(requestId: number, verificationToken: string) {
+    const response = await apiClient.post(`/gdpr/delete/${requestId}/confirm`, null, {
+      params: { verification_token: verificationToken },
+    });
+    return response.data;
+  }
+
+  async getMyRequests() {
+    const response = await apiClient.get('/gdpr/requests');
+    return response.data;
+  }
+
+  async getAllRequests(filters?: { status?: string; page?: number; limit?: number }) {
+    const response = await apiClient.get('/gdpr/admin/requests', { params: filters });
+    return response.data;
+  }
+
+  async deleteRequest(requestId: number) {
+    const response = await apiClient.delete(`/gdpr/admin/requests/${requestId}`);
+    return response.data;
+  }
+}
+
+export default new GDPRService();
+
diff --git a/Frontend/src/features/compliance/services/index.ts b/Frontend/src/features/compliance/services/index.ts
new file mode 100644
index 00000000..33660fa2
--- /dev/null
+++ b/Frontend/src/features/compliance/services/index.ts
@@ -0,0 +1,3 @@
+export { default } from './gdprService';
+export * from './gdprService';
+
diff --git a/Frontend/src/features/integrations/services/apiKeyService.ts b/Frontend/src/features/integrations/services/apiKeyService.ts
new file mode 100644
index 00000000..e3dfad2f
--- /dev/null
+++ b/Frontend/src/features/integrations/services/apiKeyService.ts
@@ -0,0 +1,55 @@
+import apiClient from '../../../shared/services/apiClient';
+
+export interface APIKey {
+  id: number;
+  name: string;
+  key_prefix: string;
+  scopes: string[];
+  rate_limit: number;
+  is_active: boolean;
+  last_used_at?: string;
+  expires_at?: string;
+  description?: string;
+  created_at: string;
+}
+
+export interface CreateAPIKeyData {
+  name: string;
+  scopes: string[];
+  description?: string;
+  rate_limit?: number;
+  expires_at?: string;
+}
+
+export interface UpdateAPIKeyData {
+  name?: string;
+  scopes?: string[];
+  description?: string;
+  rate_limit?: number;
+  expires_at?: string;
+}
+
+class APIKeyService {
+  async createAPIKey(data: CreateAPIKeyData) {
+    const response = await apiClient.post('/api-keys', data);
+    return response.data;
+  }
+
+  async getAPIKeys() {
+    const response = await apiClient.get('/api-keys');
+    return response.data;
+  }
+
+  async updateAPIKey(keyId: number, data: UpdateAPIKeyData) {
+    const response = await apiClient.put(`/api-keys/${keyId}`, data);
+    return response.data;
+  }
+
+  async revokeAPIKey(keyId: number) {
+    const response = await apiClient.delete(`/api-keys/${keyId}`);
+    return response.data;
+  }
+}
+
+export default new APIKeyService();
+
diff --git a/Frontend/src/features/integrations/services/index.ts b/Frontend/src/features/integrations/services/index.ts
new file mode 100644
index 00000000..166b46a3
--- /dev/null
+++ b/Frontend/src/features/integrations/services/index.ts
@@ -0,0 +1,5 @@
+export { default as webhookService } from './webhookService';
+export { default as apiKeyService } from './apiKeyService';
+export * from './webhookService';
+export * from './apiKeyService';
+
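A hedged sketch of how the gdprService export flow might be wired end to end. The request_id field, how the verification token reaches the user, and the exported file format are assumptions for illustration only.

// Illustrative GDPR export flow: create a request, then download the export as a file.
import gdprService from './gdprService';

async function exportMyData(verificationToken: string): Promise<void> {
  const created = await gdprService.requestDataExport(); // POST /gdpr/export
  const requestId: number = created?.data?.request_id ?? created?.request_id;

  // responseType: 'blob' in downloadExport means a Blob comes back.
  const blob: Blob = await gdprService.downloadExport(requestId, verificationToken);
  const url = URL.createObjectURL(blob);
  const link = document.createElement('a');
  link.href = url;
  link.download = `gdpr-export-${requestId}.json`; // assumed export format
  link.click();
  URL.revokeObjectURL(url);
}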
diff --git a/Frontend/src/features/integrations/services/webhookService.ts b/Frontend/src/features/integrations/services/webhookService.ts
new file mode 100644
index 00000000..1527fa22
--- /dev/null
+++ b/Frontend/src/features/integrations/services/webhookService.ts
@@ -0,0 +1,77 @@
+import apiClient from '../../../shared/services/apiClient';
+
+export interface Webhook {
+  id: number;
+  name: string;
+  url: string;
+  events: string[];
+  status: 'active' | 'inactive' | 'paused';
+  secret?: string;
+  retry_count: number;
+  timeout_seconds: number;
+  description?: string;
+  created_at: string;
+}
+
+export interface WebhookDelivery {
+  id: number;
+  webhook_id: number;
+  event_type: string;
+  event_id: string;
+  status: 'pending' | 'success' | 'failed' | 'retrying';
+  response_status?: number;
+  attempt_count: number;
+  created_at: string;
+  delivered_at?: string;
+}
+
+export interface CreateWebhookData {
+  name: string;
+  url: string;
+  events: string[];
+  description?: string;
+  retry_count?: number;
+  timeout_seconds?: number;
+}
+
+export interface UpdateWebhookData {
+  name?: string;
+  url?: string;
+  events?: string[];
+  description?: string;
+  status?: string;
+  retry_count?: number;
+  timeout_seconds?: number;
+}
+
+class WebhookService {
+  async createWebhook(data: CreateWebhookData) {
+    const response = await apiClient.post('/webhooks', data);
+    return response.data;
+  }
+
+  async getWebhooks() {
+    const response = await apiClient.get('/webhooks');
+    return response.data;
+  }
+
+  async updateWebhook(webhookId: number, data: UpdateWebhookData) {
+    const response = await apiClient.put(`/webhooks/${webhookId}`, data);
+    return response.data;
+  }
+
+  async deleteWebhook(webhookId: number) {
+    const response = await apiClient.delete(`/webhooks/${webhookId}`);
+    return response.data;
+  }
+
+  async getWebhookDeliveries(webhookId: number, page: number = 1, limit: number = 50) {
+    const response = await apiClient.get(`/webhooks/${webhookId}/deliveries`, {
+      params: { page, limit },
+    });
+    return response.data;
+  }
+}
+
+export default new WebhookService();
+
diff --git a/Frontend/src/features/system/services/approvalService.ts b/Frontend/src/features/system/services/approvalService.ts
new file mode 100644
index 00000000..7e9507d6
--- /dev/null
+++ b/Frontend/src/features/system/services/approvalService.ts
@@ -0,0 +1,54 @@
+import apiClient from '../../../shared/services/apiClient';
+
+export interface ApprovalRequest {
+  id: number;
+  approval_type: string;
+  status: 'pending' | 'approved' | 'rejected' | 'cancelled';
+  resource_type: string;
+  resource_id: number;
+  requested_by: number;
+  requested_at: string;
+  approved_by?: number;
+  approved_at?: string;
+  rejection_reason?: string;
+  priority: string;
+  notes?: string;
+  request_data?: any;
+  current_data?: any;
+}
+
+export interface ApprovalFilters {
+  approval_type?: string;
+  page?: number;
+  limit?: number;
+}
+
+class ApprovalService {
+  async getPendingApprovals(filters?: ApprovalFilters) {
+    const response = await apiClient.get('/approvals/pending', { params: filters });
+    return response.data;
+  }
+
+  async approveRequest(requestId: number, notes?: string) {
+    const response = await apiClient.post(`/approvals/${requestId}/approve`, { notes });
+    return response.data;
+  }
+
+  async rejectRequest(requestId: number, rejectionReason: string) {
+    const response = await apiClient.post(`/approvals/${requestId}/reject`, { rejection_reason: rejectionReason });
+    return response.data;
+  }
+
+  async getMyRequests(filters?: { status?: string; page?: number; limit?: number }) {
+    const response = await apiClient.get('/approvals/my-requests', { params: filters });
+    return response.data;
+  }
+
+  async cancelRequest(requestId: number) {
+    const response = await apiClient.delete(`/approvals/${requestId}`);
+    return response.data;
+  }
+}
+
+export default new ApprovalService();
+
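A short, illustrative use of webhookService. The event names, endpoint URL, and response shape are assumptions; the diff only defines the client wrapper.

// Illustrative: register a webhook for booking events, then inspect failed deliveries.
import webhookService, { WebhookDelivery } from './webhookService';

async function registerBookingWebhook(): Promise<void> {
  await webhookService.createWebhook({
    name: 'Booking events',
    url: 'https://example.com/hooks/bookings', // placeholder endpoint
    events: ['booking.created', 'booking.cancelled'], // assumed event names
    retry_count: 3,
    timeout_seconds: 10,
  });
}

async function failedDeliveries(webhookId: number): Promise<WebhookDelivery[]> {
  const payload = await webhookService.getWebhookDeliveries(webhookId, 1, 50);
  const deliveries: WebhookDelivery[] = payload?.data?.deliveries ?? payload?.deliveries ?? [];
  return deliveries.filter((d) => d.status === 'failed');
}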
diff --git a/Frontend/src/features/system/services/backupService.ts b/Frontend/src/features/system/services/backupService.ts
new file mode 100644
index 00000000..1aa00cb5
--- /dev/null
+++ b/Frontend/src/features/system/services/backupService.ts
@@ -0,0 +1,43 @@
+import apiClient from '../../../shared/services/apiClient';
+
+export interface Backup {
+  filename: string;
+  path: string;
+  size_bytes: number;
+  size_mb: number;
+  created_at: string;
+  database: string;
+  status: string;
+}
+
+class BackupService {
+  async checkStatus() {
+    const response = await apiClient.get('/backups/status');
+    return response.data;
+  }
+
+  async createBackup() {
+    const response = await apiClient.post('/backups/create');
+    return response.data;
+  }
+
+  async listBackups() {
+    const response = await apiClient.get('/backups');
+    return response.data;
+  }
+
+  async downloadBackup(filename: string) {
+    const response = await apiClient.get(`/backups/${filename}`, {
+      responseType: 'blob',
+    });
+    return response.data;
+  }
+
+  async cleanupOldBackups() {
+    const response = await apiClient.post('/backups/cleanup');
+    return response.data;
+  }
+}
+
+export default new BackupService();
+
diff --git a/Frontend/src/features/system/services/index.ts b/Frontend/src/features/system/services/index.ts
index e69de29b..f1d5a5ff 100644
--- a/Frontend/src/features/system/services/index.ts
+++ b/Frontend/src/features/system/services/index.ts
@@ -0,0 +1,5 @@
+export { default as approvalService } from './approvalService';
+export { default as backupService } from './backupService';
+export * from './approvalService';
+export * from './backupService';
+
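To show how the blob-returning downloadBackup is meant to be consumed, a minimal admin-side sketch follows. The payload shape and list ordering are assumptions.

// Illustrative: trigger a backup, then save the most recent one as a file.
import backupService, { Backup } from './backupService';

async function downloadLatestBackup(): Promise<void> {
  await backupService.createBackup(); // POST /backups/create
  const payload = await backupService.listBackups(); // GET /backups
  const backups: Backup[] = payload?.data?.backups ?? payload?.backups ?? [];
  if (backups.length === 0) return;

  const latest = backups[0]; // assumes newest-first ordering
  const blob: Blob = await backupService.downloadBackup(latest.filename);
  const url = URL.createObjectURL(blob);
  const link = document.createElement('a');
  link.href = url;
  link.download = latest.filename;
  link.click();
  URL.revokeObjectURL(url);
}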
diff --git a/Frontend/src/pages/accountant/DashboardPage.tsx b/Frontend/src/pages/accountant/DashboardPage.tsx
index 00e15f11..4bd190dc 100644
--- a/Frontend/src/pages/accountant/DashboardPage.tsx
+++ b/Frontend/src/pages/accountant/DashboardPage.tsx
[The hunks for this file (@@ -158 @@, @@ -252 @@, @@ -315 @@, @@ -335 @@, @@ -362 @@, @@ -381 @@, @@ -399 @@, @@ -426 @@, @@ -447 @@) lost their JSX markup in extraction; only text labels and class fragments survive ("Financial Dashboard", "Comprehensive financial overview and analytics", "Total Revenue", "Total Payments", "Total Invoices", "Recent Payments", "Recent Invoices", "Financial Alerts", plus the overdue-invoice and pending-payment alert messages). What remains shows a responsive-layout pass over the error state, header, date-range controls, summary cards, recent payments/invoices lists, and alerts: fixed spacing and font sizes are replaced with mobile-first Tailwind utilities that scale up at the sm: breakpoint, for example the date inputs change from "px-4 py-2.5 ..." to "flex-1 sm:flex-none px-3 sm:px-4 py-2 sm:py-2.5 ... text-sm sm:text-base".]

diff --git a/Frontend/src/pages/admin/APIKeyManagementPage.tsx b/Frontend/src/pages/admin/APIKeyManagementPage.tsx new file mode 100644 index 00000000..08f05684 --- /dev/null +++ b/Frontend/src/pages/admin/APIKeyManagementPage.tsx @@ -0,0 +1,308 @@ +import React, { useEffect, useState } from 'react'; +import { Plus, Trash2, Copy, Eye, EyeOff, Edit } from 'lucide-react'; +import apiKeyService, { APIKey, CreateAPIKeyData, UpdateAPIKeyData } from '../../features/integrations/services/apiKeyService'; +import { toast } from 'react-toastify'; +import Loading from '../../shared/components/Loading'; +import { formatDate } from '../../shared/utils/format'; + +const APIKeyManagementPage: React.FC = () => { + const [apiKeys, setApiKeys] = useState([]); + const [loading, setLoading] = useState(true); + const [showModal, setShowModal] = useState(false); + const [editingKey, setEditingKey] = useState(null); + const [newKey, setNewKey] = useState(null); + const [formData, setFormData] = useState({ + name: '', + scopes: [], + description: '', + rate_limit: 100, + }); + + const availableScopes = [ + 'read:bookings', + 'write:bookings', + 'read:payments', + 'write:payments', + 'read:invoices', + 'write:invoices', + 'read:users', + 'write:users', + ]; + + useEffect(() => { + fetchAPIKeys(); + }, []); + + const fetchAPIKeys = async () => { + try { + setLoading(true); + const response = await apiKeyService.getAPIKeys(); + setApiKeys(response.data.api_keys || []); + } catch (error: any) { + toast.error(error.response?.data?.message || 'Unable to load API keys'); + } finally { + setLoading(false); + } + }; + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + try { + if (editingKey) { + await apiKeyService.updateAPIKey(editingKey.id, formData as UpdateAPIKeyData); + toast.success('API key updated successfully'); + setShowModal(false); + setEditingKey(null); + resetForm(); + fetchAPIKeys(); + } else { + const response = await apiKeyService.createAPIKey(formData); + toast.success('API key created successfully'); + setNewKey(response.data.key); + setShowModal(false); + resetForm(); + fetchAPIKeys(); + } + } catch (error: any) { + toast.error(error.response?.data?.message || `Unable to ${editingKey ? 'update' : 'create'} API key`); + } + }; + + const handleEdit = (key: APIKey) => { + setEditingKey(key); + setFormData({ + name: key.name, + scopes: key.scopes, + description: key.description || '', + rate_limit: key.rate_limit, + expires_at: key.expires_at || undefined, + }); + setShowModal(true); + }; + + const handleRevoke = async (id: number) => { + if (!confirm('Are you sure you want to revoke this API key?')) return; + + try { + await apiKeyService.revokeAPIKey(id); + toast.success('API key revoked'); + fetchAPIKeys(); + } catch (error: any) { + toast.error(error.response?.data?.message || 'Unable to revoke API key'); + } + }; + + const copyToClipboard = (text: string) => { + navigator.clipboard.writeText(text); + toast.success('Copied to clipboard'); + }; + + const resetForm = () => { + setFormData({ + name: '', + scopes: [], + description: '', + rate_limit: 100, + }); + setEditingKey(null); + }; + + const toggleScope = (scope: string) => { + setFormData({ + ...formData, + scopes: formData.scopes.includes(scope) + ? formData.scopes.filter(s => s !== scope) + : [...formData.scopes, scope], + }); + }; + + if (loading && apiKeys.length === 0) { + return ; + } + + return ( +
+
+
+
+
+

+ API Key Management +

+
+

Manage API keys for third-party access

+
+ +
+ + {newKey && ( +
+

API Key Created (save this - it won't be shown again):

+
+ {newKey} +
+ + +
+
+
+ )} + +
+ {apiKeys.length === 0 ? ( +
+

No API keys configured

+
+ ) : ( +
+ {apiKeys.map((key) => ( +
+
+
+
+

{key.name}

+ + {key.key_prefix}... + + + {key.is_active ? 'Active' : 'Revoked'} + +
+

+ Scopes: {key.scopes.join(', ')} +

+

+ Rate Limit: {key.rate_limit}/min • Created: {formatDate(key.created_at)} + {key.last_used_at && ` • Last Used: ${formatDate(key.last_used_at)}`} +

+
+
+ {key.is_active && ( + <> + + + + )} +
+
+
+ ))} +
+ )} +
+ + {showModal && ( +
+
+

+ {editingKey ? 'Edit API Key' : 'Create API Key'} +

+
+
+ + setFormData({ ...formData, name: e.target.value })} + className="w-full px-3 sm:px-4 py-2 bg-white border-2 border-slate-200 rounded-xl focus:border-blue-400 focus:ring-4 focus:ring-blue-100 transition-all duration-200 text-slate-700 font-medium shadow-sm hover:shadow-md text-sm sm:text-base" + required + /> +
+
+ +
+ {availableScopes.map((scope) => ( + + ))} +
+
+
+ + setFormData({ ...formData, rate_limit: parseInt(e.target.value) })} + className="w-full px-3 sm:px-4 py-2 bg-white border-2 border-slate-200 rounded-xl focus:border-blue-400 focus:ring-4 focus:ring-blue-100 transition-all duration-200 text-slate-700 font-medium shadow-sm hover:shadow-md text-sm sm:text-base" + min="1" + required + /> +
+
+ +