This commit is contained in:
Iliyan Angelov
2025-12-01 01:08:39 +02:00
parent 0fa2adeb19
commit 1a103a769f
234 changed files with 5513 additions and 283 deletions

Binary file not shown.

View File

@@ -0,0 +1,218 @@
"""
Add enterprise features: approval workflows, GDPR requests, sessions, webhooks, API keys.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# Revision identifiers, used by Alembic to order migrations.
revision = 'add_enterprise_features'
down_revision = 'add_sections_blog'  # Depends on latest migration
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the enterprise-feature tables and their indexes.

    Tables created (in dependency order, all FK-linked to ``users``):
    approval_requests, gdpr_requests, user_sessions, webhooks,
    webhook_deliveries (FK to webhooks), api_keys.
    """
    # Approval workflow table
    op.create_table(
        'approval_requests',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('approval_type', sa.Enum('invoice_update', 'payment_refund', 'invoice_mark_paid', 'financial_adjustment', 'user_role_change', 'large_transaction', name='approvaltype'), nullable=False),
        sa.Column('status', sa.Enum('pending', 'approved', 'rejected', 'cancelled', name='approvalstatus'), nullable=False),
        sa.Column('requested_by', sa.Integer(), nullable=False),
        sa.Column('requested_at', sa.DateTime(), nullable=False),
        sa.Column('approved_by', sa.Integer(), nullable=True),
        sa.Column('approved_at', sa.DateTime(), nullable=True),
        sa.Column('rejection_reason', sa.Text(), nullable=True),
        # Generic resource pointer: type + id instead of a hard FK, so any
        # entity (invoice, payment, ...) can be placed under approval.
        sa.Column('resource_type', sa.String(length=50), nullable=False),
        sa.Column('resource_id', sa.Integer(), nullable=False),
        # Proposed change vs. snapshot of current state, for diffing/audit.
        sa.Column('request_data', sa.JSON(), nullable=True),
        sa.Column('current_data', sa.JSON(), nullable=True),
        sa.Column('priority', sa.String(length=20), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('extra_metadata', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['approved_by'], ['users.id'], ),
        sa.ForeignKeyConstraint(['requested_by'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_approval_requests_id'), 'approval_requests', ['id'], unique=False)
    op.create_index(op.f('ix_approval_requests_approval_type'), 'approval_requests', ['approval_type'], unique=False)
    op.create_index(op.f('ix_approval_requests_status'), 'approval_requests', ['status'], unique=False)
    op.create_index(op.f('ix_approval_requests_requested_by'), 'approval_requests', ['requested_by'], unique=False)
    op.create_index(op.f('ix_approval_requests_approved_by'), 'approval_requests', ['approved_by'], unique=False)
    op.create_index(op.f('ix_approval_requests_resource_type'), 'approval_requests', ['resource_type'], unique=False)
    op.create_index(op.f('ix_approval_requests_resource_id'), 'approval_requests', ['resource_id'], unique=False)
    # GDPR requests table
    op.create_table(
        'gdpr_requests',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('request_type', sa.Enum('data_export', 'data_deletion', 'data_rectification', 'consent_withdrawal', name='gdprrequesttype'), nullable=False),
        sa.Column('status', sa.Enum('pending', 'processing', 'completed', 'rejected', 'cancelled', name='gdprrequeststatus'), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        # Email kept denormalized so the request record survives user deletion.
        sa.Column('user_email', sa.String(length=255), nullable=False),
        sa.Column('request_data', sa.JSON(), nullable=True),
        # Unique token used to verify the requester before processing.
        sa.Column('verification_token', sa.String(length=255), nullable=True),
        sa.Column('verified_at', sa.DateTime(), nullable=True),
        sa.Column('processed_by', sa.Integer(), nullable=True),
        sa.Column('processed_at', sa.DateTime(), nullable=True),
        sa.Column('processing_notes', sa.Text(), nullable=True),
        sa.Column('export_file_path', sa.String(length=500), nullable=True),
        sa.Column('deletion_log', sa.JSON(), nullable=True),
        # 45 chars accommodates IPv6 (incl. IPv4-mapped) addresses.
        sa.Column('ip_address', sa.String(length=45), nullable=True),
        sa.Column('user_agent', sa.String(length=255), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('expires_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['processed_by'], ['users.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_gdpr_requests_id'), 'gdpr_requests', ['id'], unique=False)
    op.create_index(op.f('ix_gdpr_requests_request_type'), 'gdpr_requests', ['request_type'], unique=False)
    op.create_index(op.f('ix_gdpr_requests_status'), 'gdpr_requests', ['status'], unique=False)
    op.create_index(op.f('ix_gdpr_requests_user_id'), 'gdpr_requests', ['user_id'], unique=False)
    op.create_index(op.f('ix_gdpr_requests_verification_token'), 'gdpr_requests', ['verification_token'], unique=True)
    # User sessions table
    op.create_table(
        'user_sessions',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        # Tokens are unique (enforced below via unique indexes).
        sa.Column('session_token', sa.String(length=255), nullable=False),
        sa.Column('refresh_token', sa.String(length=255), nullable=True),
        sa.Column('ip_address', sa.String(length=45), nullable=True),
        sa.Column('user_agent', sa.String(length=500), nullable=True),
        sa.Column('device_info', sa.Text(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('last_activity', sa.DateTime(), nullable=False),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_sessions_id'), 'user_sessions', ['id'], unique=False)
    op.create_index(op.f('ix_user_sessions_user_id'), 'user_sessions', ['user_id'], unique=False)
    op.create_index(op.f('ix_user_sessions_session_token'), 'user_sessions', ['session_token'], unique=True)
    op.create_index(op.f('ix_user_sessions_refresh_token'), 'user_sessions', ['refresh_token'], unique=True)
    op.create_index(op.f('ix_user_sessions_is_active'), 'user_sessions', ['is_active'], unique=False)
    op.create_index(op.f('ix_user_sessions_last_activity'), 'user_sessions', ['last_activity'], unique=False)
    op.create_index(op.f('ix_user_sessions_expires_at'), 'user_sessions', ['expires_at'], unique=False)
    # Webhooks table
    op.create_table(
        'webhooks',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('url', sa.String(length=500), nullable=False),
        # Shared secret used to sign outgoing payloads.
        sa.Column('secret', sa.String(length=255), nullable=False),
        # JSON list of event names this webhook subscribes to.
        sa.Column('events', sa.JSON(), nullable=False),
        sa.Column('status', sa.Enum('active', 'inactive', 'paused', name='webhookstatus'), nullable=False),
        sa.Column('retry_count', sa.Integer(), nullable=False),
        sa.Column('timeout_seconds', sa.Integer(), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('created_by', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['created_by'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_webhooks_id'), 'webhooks', ['id'], unique=False)
    op.create_index(op.f('ix_webhooks_status'), 'webhooks', ['status'], unique=False)
    # Webhook deliveries table
    op.create_table(
        'webhook_deliveries',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('webhook_id', sa.Integer(), nullable=False),
        sa.Column('event_type', sa.String(length=100), nullable=False),
        sa.Column('event_id', sa.String(length=255), nullable=False),
        sa.Column('status', sa.Enum('pending', 'success', 'failed', 'retrying', name='webhookdeliverystatus'), nullable=False),
        sa.Column('payload', sa.JSON(), nullable=False),
        sa.Column('response_status', sa.Integer(), nullable=True),
        sa.Column('response_body', sa.Text(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('attempt_count', sa.Integer(), nullable=False),
        sa.Column('next_retry_at', sa.DateTime(), nullable=True),
        sa.Column('delivered_at', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['webhook_id'], ['webhooks.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_webhook_deliveries_id'), 'webhook_deliveries', ['id'], unique=False)
    op.create_index(op.f('ix_webhook_deliveries_webhook_id'), 'webhook_deliveries', ['webhook_id'], unique=False)
    op.create_index(op.f('ix_webhook_deliveries_event_type'), 'webhook_deliveries', ['event_type'], unique=False)
    op.create_index(op.f('ix_webhook_deliveries_event_id'), 'webhook_deliveries', ['event_id'], unique=False)
    op.create_index(op.f('ix_webhook_deliveries_status'), 'webhook_deliveries', ['status'], unique=False)
    op.create_index(op.f('ix_webhook_deliveries_created_at'), 'webhook_deliveries', ['created_at'], unique=False)
    # API keys table
    op.create_table(
        'api_keys',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        # Only a hash of the key is stored; the prefix allows fast lookup
        # and user-facing identification without exposing the full key.
        sa.Column('key_hash', sa.String(length=255), nullable=False),
        sa.Column('key_prefix', sa.String(length=20), nullable=False),
        sa.Column('scopes', sa.JSON(), nullable=False),
        sa.Column('rate_limit', sa.Integer(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('expires_at', sa.DateTime(), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('last_used_at', sa.DateTime(), nullable=True),
        sa.Column('created_by', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['created_by'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_api_keys_id'), 'api_keys', ['id'], unique=False)
    op.create_index(op.f('ix_api_keys_key_hash'), 'api_keys', ['key_hash'], unique=True)
    op.create_index(op.f('ix_api_keys_key_prefix'), 'api_keys', ['key_prefix'], unique=False)
    op.create_index(op.f('ix_api_keys_is_active'), 'api_keys', ['is_active'], unique=False)
    op.create_index(op.f('ix_api_keys_expires_at'), 'api_keys', ['expires_at'], unique=False)
def downgrade() -> None:
    """Drop all enterprise-feature tables.

    Indexes are dropped before their table, and tables are dropped in
    reverse creation order so FK dependencies (webhook_deliveries ->
    webhooks) are respected.
    """
    op.drop_index(op.f('ix_api_keys_expires_at'), table_name='api_keys')
    op.drop_index(op.f('ix_api_keys_is_active'), table_name='api_keys')
    op.drop_index(op.f('ix_api_keys_key_prefix'), table_name='api_keys')
    op.drop_index(op.f('ix_api_keys_key_hash'), table_name='api_keys')
    op.drop_index(op.f('ix_api_keys_id'), table_name='api_keys')
    op.drop_table('api_keys')
    op.drop_index(op.f('ix_webhook_deliveries_created_at'), table_name='webhook_deliveries')
    op.drop_index(op.f('ix_webhook_deliveries_status'), table_name='webhook_deliveries')
    op.drop_index(op.f('ix_webhook_deliveries_event_id'), table_name='webhook_deliveries')
    op.drop_index(op.f('ix_webhook_deliveries_event_type'), table_name='webhook_deliveries')
    op.drop_index(op.f('ix_webhook_deliveries_webhook_id'), table_name='webhook_deliveries')
    op.drop_index(op.f('ix_webhook_deliveries_id'), table_name='webhook_deliveries')
    op.drop_table('webhook_deliveries')
    op.drop_index(op.f('ix_webhooks_status'), table_name='webhooks')
    op.drop_index(op.f('ix_webhooks_id'), table_name='webhooks')
    op.drop_table('webhooks')
    op.drop_index(op.f('ix_user_sessions_expires_at'), table_name='user_sessions')
    op.drop_index(op.f('ix_user_sessions_last_activity'), table_name='user_sessions')
    op.drop_index(op.f('ix_user_sessions_is_active'), table_name='user_sessions')
    op.drop_index(op.f('ix_user_sessions_refresh_token'), table_name='user_sessions')
    op.drop_index(op.f('ix_user_sessions_session_token'), table_name='user_sessions')
    op.drop_index(op.f('ix_user_sessions_user_id'), table_name='user_sessions')
    op.drop_index(op.f('ix_user_sessions_id'), table_name='user_sessions')
    op.drop_table('user_sessions')
    op.drop_index(op.f('ix_gdpr_requests_verification_token'), table_name='gdpr_requests')
    op.drop_index(op.f('ix_gdpr_requests_user_id'), table_name='gdpr_requests')
    op.drop_index(op.f('ix_gdpr_requests_status'), table_name='gdpr_requests')
    op.drop_index(op.f('ix_gdpr_requests_request_type'), table_name='gdpr_requests')
    op.drop_index(op.f('ix_gdpr_requests_id'), table_name='gdpr_requests')
    op.drop_table('gdpr_requests')
    op.drop_index(op.f('ix_approval_requests_resource_id'), table_name='approval_requests')
    op.drop_index(op.f('ix_approval_requests_resource_type'), table_name='approval_requests')
    op.drop_index(op.f('ix_approval_requests_approved_by'), table_name='approval_requests')
    op.drop_index(op.f('ix_approval_requests_requested_by'), table_name='approval_requests')
    op.drop_index(op.f('ix_approval_requests_status'), table_name='approval_requests')
    op.drop_index(op.f('ix_approval_requests_approval_type'), table_name='approval_requests')
    op.drop_index(op.f('ix_approval_requests_id'), table_name='approval_requests')
    op.drop_table('approval_requests')

View File

@@ -0,0 +1,25 @@
"""merge_enterprise_and_borica
Revision ID: dbafe747c931
Revises: add_enterprise_features, add_borica_payment_method
Create Date: 2025-12-01 00:42:31.999574
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'dbafe747c931'
# A tuple down_revision marks this as a merge point joining two branch heads.
down_revision = ('add_enterprise_features', 'add_borica_payment_method')
branch_labels = None
depends_on = None
def upgrade() -> None:
    """No-op: merge revisions only join branch histories, no schema change."""
    pass
def downgrade() -> None:
    """No-op: nothing to undo for a merge revision."""
    pass

View File

@@ -35,3 +35,12 @@ pytest-mock==3.12.0
# prometheus-client==0.19.0 # Uncomment for Prometheus metrics
# sentry-sdk==1.38.0 # Uncomment for Sentry error tracking
# System Dependencies (not installable via pip - must be installed separately)
# mysqldump: Required for database backup functionality
# Ubuntu/Debian: sudo apt-get install mysql-client
# CentOS/RHEL: sudo yum install mysql (or sudo dnf install mysql)
# macOS: brew install mysql-client
# Alpine: apk add mysql-client
# Or run: bash scripts/install_mysqldump.sh
# Check status: python scripts/check_dependencies.py

View File

@@ -0,0 +1,69 @@
#!/usr/bin/env python3
"""
Check for required system dependencies for the Hotel Booking platform.
"""
import subprocess
import shutil
import sys
import platform
def check_command(command, name, install_instructions):
    """Check whether *command* is available on PATH and print its status.

    Args:
        command: Executable name to look up (e.g. ``'mysqldump'``).
        name: Human-readable label used in the printed report.
        install_instructions: Hint printed when the command is missing.

    Returns:
        True if the command is available, False otherwise.
    """
    if not shutil.which(command):
        print(f"{name}: NOT FOUND")
        print(f" Installation: {install_instructions}")
        return False
    # Probe the version defensively: `--version` can hang (TimeoutExpired)
    # or the binary can disappear between which() and run() (OSError).
    # The command's presence was already confirmed, so fall back to a
    # generic label rather than crashing the whole dependency check.
    try:
        version_output = subprocess.run(
            [command, '--version'],
            capture_output=True,
            text=True,
            timeout=5
        )
        version = version_output.stdout.split('\n')[0] if version_output.returncode == 0 else 'installed'
    except (subprocess.TimeoutExpired, OSError):
        version = 'installed'
    print(f"{name}: {version}")
    return True
def main():
    """Run every dependency check and return a process exit code.

    Returns 0 when all required system tools are present, 1 otherwise.
    """
    banner = "=" * 60
    print(banner)
    print("Checking System Dependencies")
    print(banner)
    print()
    # Build the most helpful install hint for the current platform before
    # probing for mysqldump.
    os_type = platform.system().lower()
    if os_type == 'darwin':
        install_cmd = "brew install mysql-client"
    elif os_type != 'linux':
        install_cmd = "Install MySQL client tools for your OS"
    else:
        # platform.linux_distribution() was removed in Python 3.8; fall back
        # to a generic hint when it is unavailable.
        if hasattr(platform, 'linux_distribution'):
            distro = platform.linux_distribution()[0].lower()
        else:
            distro = 'unknown'
        if 'ubuntu' in distro or 'debian' in distro:
            install_cmd = "sudo apt-get install mysql-client"
        elif 'centos' in distro or 'rhel' in distro or 'fedora' in distro:
            install_cmd = "sudo yum install mysql (or sudo dnf install mysql)"
        else:
            install_cmd = "Install MySQL client tools for your distribution"
    all_ok = check_command('mysqldump', 'mysqldump (MySQL client)', install_cmd)
    print()
    print(banner)
    if all_ok:
        print("✓ All dependencies are installed!")
        return 0
    print("✗ Some dependencies are missing.")
    print()
    print("You can install mysqldump using the provided script:")
    print(" bash scripts/install_mysqldump.sh")
    return 1
# Script entry point: exit code 0 means all dependencies found, 1 otherwise.
if __name__ == '__main__':
    sys.exit(main())

View File

@@ -0,0 +1,43 @@
"""
User session management model.
"""
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Boolean, Text
from sqlalchemy.orm import relationship
from datetime import datetime, timedelta
from ...shared.config.database import Base
from ...shared.config.settings import settings
class UserSession(Base):
    """Server-side record of an authenticated user session.

    Stores the opaque session/refresh tokens plus client metadata so
    sessions can be listed, validated, and revoked individually or in bulk.
    """
    __tablename__ = 'user_sessions'
    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
    # Owning user; indexed for per-user session listing/revocation.
    user_id = Column(Integer, ForeignKey('users.id'), nullable=False, index=True)
    # Unique opaque tokens used to look sessions up.
    session_token = Column(String(255), unique=True, nullable=False, index=True)
    refresh_token = Column(String(255), unique=True, nullable=True, index=True)
    # Session details (client metadata captured at login)
    ip_address = Column(String(45), nullable=True)  # 45 chars fits IPv6
    user_agent = Column(String(500), nullable=True)
    device_info = Column(Text, nullable=True)  # JSON string with device details
    # Session status; revocation flips is_active to False.
    is_active = Column(Boolean, default=True, nullable=False, index=True)
    last_activity = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)
    # Expiration
    expires_at = Column(DateTime, nullable=False, index=True)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    # Relationships
    user = relationship('User', foreign_keys=[user_id])

    @property
    def is_expired(self) -> bool:
        """Check if session is expired (naive-UTC comparison)."""
        return datetime.utcnow() > self.expires_at

    @property
    def is_valid(self) -> bool:
        """Check if session is valid (active and not expired)."""
        return self.is_active and not self.is_expired

View File

@@ -14,6 +14,7 @@ from ...analytics.services.audit_service import audit_service
from slowapi import Limiter, _rate_limit_exceeded_handler
from slowapi.util import get_remote_address
from slowapi.errors import RateLimitExceeded
from functools import wraps
router = APIRouter(prefix='/auth', tags=['auth'])
@@ -282,7 +283,15 @@ async def get_profile(current_user: User=Depends(get_current_user), db: Session=
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
@router.put('/profile')
async def update_profile(profile_data: UpdateProfileRequest, current_user: User=Depends(get_current_user), db: Session=Depends(get_db)):
async def update_profile(
request: Request,
profile_data: UpdateProfileRequest,
current_user: User=Depends(get_current_user),
db: Session=Depends(get_db)
):
# Rate limiting is handled by global middleware (slowapi)
# The global rate limiter applies default limits to all endpoints
# For stricter limits on profile updates, configure in settings or use endpoint-specific limits
try:
user = await auth_service.update_profile(
db=db,
@@ -302,7 +311,7 @@ async def update_profile(profile_data: UpdateProfileRequest, current_user: User=
status_code = status.HTTP_404_NOT_FOUND
raise HTTPException(status_code=status_code, detail=error_message)
except Exception as e:
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f'An error occurred: {str(e)}')
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail='An error occurred while updating your profile. Please try again.')
@router.post('/forgot-password', response_model=MessageResponse)
async def forgot_password(request: ForgotPasswordRequest, db: Session=Depends(get_db)):

View File

@@ -0,0 +1,92 @@
"""
User session management routes.
"""
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from ...shared.config.database import get_db
from ...shared.config.logging_config import get_logger
from ...security.middleware.auth import get_current_user
from ...auth.models.user import User
from ...auth.services.session_service import session_service
from ...shared.utils.response_helpers import success_response
logger = get_logger(__name__)
router = APIRouter(prefix='/sessions', tags=['sessions'])
@router.get('/')
async def get_my_sessions(
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Get current user's active sessions.

    Returns a JSON list of the user's non-expired, active sessions with
    client metadata (IP, user agent, device info) and timestamps.
    """
    try:
        sessions = session_service.get_user_sessions(
            db=db,
            user_id=current_user.id,
            active_only=True
        )
        return success_response(data={
            'sessions': [{
                'id': s.id,
                'ip_address': s.ip_address,
                'user_agent': s.user_agent,
                'device_info': s.device_info,
                'last_activity': s.last_activity.isoformat() if s.last_activity else None,
                'created_at': s.created_at.isoformat() if s.created_at else None,
                'expires_at': s.expires_at.isoformat() if s.expires_at else None
            } for s in sessions]
        })
    except Exception as e:
        # Log full details server-side, but do not echo str(e) to the client:
        # raw exception text can leak internals (SQL, paths, config).
        logger.error(f'Error getting sessions: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail='An error occurred while retrieving sessions. Please try again.')
@router.delete('/{session_id}')
async def revoke_session(
    session_id: int,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Revoke a specific session.

    Only sessions owned by the caller can be revoked; a foreign or unknown
    session_id yields 404 (no existence leak to other users).
    """
    try:
        # Verify session belongs to user before revoking.
        from ...auth.models.user_session import UserSession
        session = db.query(UserSession).filter(
            UserSession.id == session_id,
            UserSession.user_id == current_user.id
        ).first()
        if not session:
            raise HTTPException(status_code=404, detail='Session not found')
        success = session_service.revoke_session(db=db, session_token=session.session_token)
        if not success:
            raise HTTPException(status_code=404, detail='Session not found')
        return success_response(message='Session revoked successfully')
    except HTTPException:
        # Re-raise intended HTTP errors (404) untouched.
        raise
    except Exception as e:
        # Log full details server-side, but return a generic 500 message:
        # str(e) may leak internal details to the client.
        logger.error(f'Error revoking session: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail='An error occurred while revoking the session. Please try again.')
@router.post('/revoke-all')
async def revoke_all_sessions(
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Revoke all sessions for current user.

    Returns the number of sessions that were revoked.
    """
    try:
        count = session_service.revoke_all_user_sessions(
            db=db,
            user_id=current_user.id
        )
        return success_response(
            data={'revoked_count': count},
            message=f'Revoked {count} session(s)'
        )
    except Exception as e:
        # Log full details server-side, but return a generic 500 message:
        # str(e) may leak internal details to the client.
        logger.error(f'Error revoking all sessions: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail='An error occurred while revoking sessions. Please try again.')

View File

@@ -21,9 +21,10 @@ async def get_users(search: Optional[str]=Query(None), role: Optional[str]=Query
if search:
query = query.filter(or_(User.full_name.like(f'%{search}%'), User.email.like(f'%{search}%'), User.phone.like(f'%{search}%')))
if role:
role_map = {'admin': 1, 'staff': 2, 'customer': 3, 'accountant': 4}
if role in role_map:
query = query.filter(User.role_id == role_map[role])
# Query role by name instead of hardcoded IDs for flexibility
role_obj = db.query(Role).filter(Role.name == role).first()
if role_obj:
query = query.filter(User.role_id == role_obj.id)
if status_filter:
is_active = status_filter == 'active'
query = query.filter(User.is_active == is_active)

View File

@@ -372,7 +372,8 @@ class AuthService:
if full_name is not None:
user.full_name = full_name
if email is not None:
# Normalize email (lowercase and trim)
email = email.lower().strip()
existing_user = db.query(User).filter(
User.email == email,
User.id != user_id

View File

@@ -0,0 +1,149 @@
"""
User session management service.
"""
from sqlalchemy.orm import Session
from typing import Optional, List
from datetime import datetime, timedelta
import secrets
from ..models.user_session import UserSession
from ...shared.config.settings import settings
from ...shared.config.logging_config import get_logger
logger = get_logger(__name__)
class SessionService:
    """Service for managing user sessions.

    All methods are static and operate on a caller-supplied SQLAlchemy
    ``Session``; each mutating method commits before returning.
    """

    @staticmethod
    def create_session(
        db: Session,
        user_id: int,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None,
        device_info: Optional[str] = None
    ) -> UserSession:
        """Create a new user session.

        Generates cryptographically strong tokens via ``secrets`` and ties
        the session lifetime to the JWT access-token expiry setting.
        """
        session_token = secrets.token_urlsafe(64)
        refresh_token = secrets.token_urlsafe(64)
        # Calculate expiration from the configured JWT access-token lifetime.
        expires_at = datetime.utcnow() + timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES)
        session = UserSession(
            user_id=user_id,
            session_token=session_token,
            refresh_token=refresh_token,
            ip_address=ip_address,
            user_agent=user_agent,
            device_info=device_info,
            expires_at=expires_at
        )
        db.add(session)
        db.commit()
        db.refresh(session)
        logger.info(f'Session created for user {user_id}')
        return session

    @staticmethod
    def get_session(
        db: Session,
        session_token: str
    ) -> Optional[UserSession]:
        """Get a session by token.

        Only filters on is_active; expiry is NOT checked here — callers
        should consult ``session.is_valid``.
        """
        return db.query(UserSession).filter(
            UserSession.session_token == session_token,
            UserSession.is_active == True
        ).first()

    @staticmethod
    def update_session_activity(
        db: Session,
        session_token: str
    ) -> Optional[UserSession]:
        """Update session last activity timestamp.

        Returns the session (possibly stale/invalid) or None if not found;
        the timestamp is only touched when the session is still valid.
        """
        session = SessionService.get_session(db, session_token)
        if session and session.is_valid:
            session.last_activity = datetime.utcnow()
            db.commit()
            db.refresh(session)
        return session

    @staticmethod
    def revoke_session(
        db: Session,
        session_token: str
    ) -> bool:
        """Revoke a session.

        Soft-revokes by clearing is_active (row is kept for audit).
        Returns True if a matching session existed, False otherwise.
        """
        session = db.query(UserSession).filter(
            UserSession.session_token == session_token
        ).first()
        if session:
            session.is_active = False
            db.commit()
            logger.info(f'Session {session.id} revoked')
            return True
        return False

    @staticmethod
    def revoke_all_user_sessions(
        db: Session,
        user_id: int,
        exclude_token: Optional[str] = None
    ) -> int:
        """Revoke all sessions for a user, optionally excluding one.

        ``exclude_token`` lets "log out everywhere else" keep the current
        session alive. Returns the number of sessions revoked.
        """
        query = db.query(UserSession).filter(
            UserSession.user_id == user_id,
            UserSession.is_active == True
        )
        if exclude_token:
            query = query.filter(UserSession.session_token != exclude_token)
        sessions = query.all()
        count = len(sessions)
        for session in sessions:
            session.is_active = False
        db.commit()
        logger.info(f'Revoked {count} sessions for user {user_id}')
        return count

    @staticmethod
    def get_user_sessions(
        db: Session,
        user_id: int,
        active_only: bool = True
    ) -> List[UserSession]:
        """Get all sessions for a user, most recently active first.

        With ``active_only`` (default) only non-expired active sessions
        are returned.
        """
        query = db.query(UserSession).filter(UserSession.user_id == user_id)
        if active_only:
            query = query.filter(
                UserSession.is_active == True,
                UserSession.expires_at > datetime.utcnow()
            )
        return query.order_by(UserSession.last_activity.desc()).all()

    @staticmethod
    def cleanup_expired_sessions(db: Session) -> int:
        """Clean up expired sessions.

        Marks expired-but-still-active sessions inactive (rows are kept).
        Intended to be run periodically. Returns the number deactivated.
        """
        expired = db.query(UserSession).filter(
            UserSession.expires_at < datetime.utcnow(),
            UserSession.is_active == True
        ).all()
        count = len(expired)
        for session in expired:
            session.is_active = False
        db.commit()
        logger.info(f'Cleaned up {count} expired sessions')
        return count

# Module-level singleton used by the session routes.
session_service = SessionService()

View File

@@ -306,7 +306,9 @@ async def create_booking(booking_data: CreateBookingRequest, current_user: User=
if payment_method in ['stripe', 'paypal']:
initial_status = BookingStatus.pending
final_notes = notes or ''
# Sanitize user-provided notes to prevent XSS
from html import escape
final_notes = escape(notes) if notes else ''
if promotion_code:
promotion_note = f'Promotion Code: {promotion_code}'
final_notes = f'{promotion_note}\n{final_notes}'.strip() if final_notes else promotion_note
@@ -507,23 +509,36 @@ async def create_booking(booking_data: CreateBookingRequest, current_user: User=
return success_response(data={'booking': booking_dict}, message=message)
except HTTPException:
if 'transaction' in locals():
transaction.rollback()
try:
transaction.rollback()
except Exception:
pass
raise
except IntegrityError as e:
transaction.rollback()
if 'transaction' in locals():
try:
transaction.rollback()
except Exception:
pass
logger.error(f'Database integrity error during booking creation: {str(e)}')
raise HTTPException(status_code=409, detail='Booking conflict detected. Please try again.')
except Exception as e:
if 'transaction' in locals():
transaction.rollback()
try:
transaction.rollback()
except Exception:
pass
logger.error(f'Error creating booking: {str(e)}', exc_info=True)
import logging
import traceback
logger = logging.getLogger(__name__)
logger.error(f'Error creating booking (payment_method: {payment_method}): {str(e)}')
logger.error(f'Traceback: {traceback.format_exc()}')
db.rollback()
raise HTTPException(status_code=500, detail=str(e))
try:
db.rollback()
except Exception:
pass
raise HTTPException(status_code=500, detail='An error occurred while creating the booking. Please try again.')
@router.get('/{id}')
async def get_booking_by_id(id: int, request: Request, current_user: User=Depends(get_current_user), db: Session=Depends(get_db)):
@@ -772,7 +787,9 @@ async def update_booking(id: int, booking_data: UpdateBookingRequest, current_us
booking.num_guests = booking_data.guest_count
if booking_data.notes is not None:
booking.special_requests = booking_data.notes
# Sanitize user-provided notes to prevent XSS
from html import escape
booking.special_requests = escape(booking_data.notes)
# Restrict staff from modifying booking prices (only admin can)
if booking_data.total_price is not None:
@@ -1060,7 +1077,9 @@ async def admin_create_booking(booking_data: AdminCreateBookingRequest, current_
except ValueError:
initial_status = BookingStatus.confirmed
final_notes = notes or ''
# Sanitize user-provided notes to prevent XSS
from html import escape
final_notes = escape(notes) if notes else ''
if promotion_code:
promotion_note = f'Promotion Code: {promotion_code}'
final_notes = f'{promotion_note}\n{final_notes}'.strip() if final_notes else promotion_note
@@ -1320,15 +1339,24 @@ async def admin_create_booking(booking_data: AdminCreateBookingRequest, current_
)
except HTTPException:
if 'transaction' in locals():
transaction.rollback()
try:
transaction.rollback()
except Exception:
pass
raise
except IntegrityError as e:
if 'transaction' in locals():
transaction.rollback()
try:
transaction.rollback()
except Exception:
pass
logger.error(f'Database integrity error during admin booking creation: {str(e)}')
raise HTTPException(status_code=409, detail='Booking conflict detected. Please try again.')
except Exception as e:
if 'transaction' in locals():
transaction.rollback()
try:
transaction.rollback()
except Exception:
pass
logger.error(f'Error creating booking (admin/staff): {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail='An error occurred while creating the booking')
raise HTTPException(status_code=500, detail='An error occurred while creating the booking. Please try again.')

View File

@@ -0,0 +1,4 @@
"""
GDPR compliance module.
"""

View File

@@ -0,0 +1,58 @@
"""
GDPR compliance models for data export and deletion requests.
"""
from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Enum, JSON
from sqlalchemy.orm import relationship
from datetime import datetime
import enum
from ...shared.config.database import Base
class GDPRRequestType(str, enum.Enum):
    """Kinds of GDPR requests a user can file (str-valued for JSON/DB use)."""
    data_export = "data_export"
    data_deletion = "data_deletion"
    data_rectification = "data_rectification"
    consent_withdrawal = "consent_withdrawal"
class GDPRRequestStatus(str, enum.Enum):
    """Lifecycle states of a GDPR request (str-valued for JSON/DB use)."""
    pending = "pending"
    processing = "processing"
    completed = "completed"
    rejected = "rejected"
    cancelled = "cancelled"
class GDPRRequest(Base):
    """A user's GDPR request (export, deletion, rectification, withdrawal).

    Tracks verification, processing staff, outcome artifacts (export file /
    deletion log), and client metadata for the audit trail.
    """
    __tablename__ = 'gdpr_requests'
    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
    request_type = Column(Enum(GDPRRequestType), nullable=False, index=True)
    status = Column(Enum(GDPRRequestStatus), default=GDPRRequestStatus.pending, nullable=False, index=True)
    # User making the request
    user_id = Column(Integer, ForeignKey('users.id'), nullable=False, index=True)
    user_email = Column(String(255), nullable=False)  # Store email even if user is deleted
    # Request details
    request_data = Column(JSON, nullable=True)  # Additional request parameters
    # Unique token used to confirm the requester before processing.
    verification_token = Column(String(255), nullable=True, unique=True, index=True)
    verified_at = Column(DateTime, nullable=True)
    # Processing (staff member who handled the request and when)
    processed_by = Column(Integer, ForeignKey('users.id'), nullable=True)
    processed_at = Column(DateTime, nullable=True)
    processing_notes = Column(Text, nullable=True)
    # Export/deletion details
    export_file_path = Column(String(500), nullable=True)  # Path to exported data file
    deletion_log = Column(JSON, nullable=True)  # Log of what was deleted
    # Metadata
    ip_address = Column(String(45), nullable=True)  # 45 chars fits IPv6
    user_agent = Column(String(255), nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
    expires_at = Column(DateTime, nullable=True)  # For export links expiration
    # Relationships: requester vs. the staff member who processed it.
    user = relationship('User', foreign_keys=[user_id])
    processor = relationship('User', foreign_keys=[processed_by])

View File

@@ -0,0 +1,4 @@
"""
GDPR compliance routes.
"""

View File

@@ -0,0 +1,272 @@
"""
GDPR compliance routes for data export and deletion.
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Response
from sqlalchemy.orm import Session, noload
from typing import Optional
from ...shared.config.database import get_db
from ...shared.config.logging_config import get_logger
from ...security.middleware.auth import get_current_user, authorize_roles
from ...auth.models.user import User
from ..services.gdpr_service import gdpr_service
from ..models.gdpr_request import GDPRRequest, GDPRRequestType, GDPRRequestStatus
from ...shared.utils.response_helpers import success_response
from fastapi import Request
logger = get_logger(__name__)
router = APIRouter(prefix='/gdpr', tags=['gdpr'])
@router.post('/export')
async def request_data_export(
    request: Request,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Request export of user's personal data (GDPR)."""
    try:
        # Capture the caller's network metadata for the audit trail.
        client = request.client
        source_ip = client.host if client else None
        agent = request.headers.get('User-Agent')
        gdpr_request = await gdpr_service.create_data_export_request(
            db=db,
            user_id=current_user.id,
            ip_address=source_ip,
            user_agent=agent
        )
        expiry = gdpr_request.expires_at
        payload = {
            'request_id': gdpr_request.id,
            'verification_token': gdpr_request.verification_token,
            'status': gdpr_request.status.value,
            'expires_at': expiry.isoformat() if expiry else None
        }
        return success_response(
            data=payload,
            message='Data export request created. You will receive an email with download link once ready.'
        )
    except Exception as e:
        logger.error(f'Error creating data export request: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.get('/export/{request_id}')
async def get_export_data(
    request_id: int,
    verification_token: str = Query(...),
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Get exported user data.

    Looks up the caller's export request by id + verification token, then:
    - pending requests are processed inline and the data returned as JSON;
    - completed requests serve the previously generated export file;
    - expired requests are rejected with 410 so stale links cannot leak data.
    """
    from datetime import datetime
    try:
        gdpr_request = db.query(GDPRRequest).options(
            noload(GDPRRequest.user),
            noload(GDPRRequest.processor)
        ).filter(
            GDPRRequest.id == request_id,
            GDPRRequest.user_id == current_user.id,
            GDPRRequest.verification_token == verification_token,
            GDPRRequest.request_type == GDPRRequestType.data_export
        ).first()
        if not gdpr_request:
            raise HTTPException(status_code=404, detail='Export request not found or invalid token')
        # Fix: expires_at is set for exactly this purpose (EXPORT_EXPIRY_DAYS)
        # but was never checked, so expired download links kept working.
        # Naive utcnow comparison matches how the service stamps expires_at.
        if gdpr_request.expires_at and datetime.utcnow() > gdpr_request.expires_at:
            raise HTTPException(status_code=410, detail='Export request has expired')
        if gdpr_request.status == GDPRRequestStatus.pending:
            # Process export on first access.
            export_data = await gdpr_service.export_user_data(
                db=db,
                user_id=current_user.id,
                request_id=request_id
            )
            return success_response(data=export_data)
        elif gdpr_request.status == GDPRRequestStatus.completed and gdpr_request.export_file_path:
            # Serve the stored export file if it still exists on disk.
            from pathlib import Path
            file_path = Path(gdpr_request.export_file_path)
            if file_path.exists():
                return Response(
                    content=file_path.read_bytes(),
                    media_type='application/json',
                    headers={
                        'Content-Disposition': f'attachment; filename="user_data_export_{request_id}.json"'
                    }
                )
        # Any other state (missing file, processing, rejected, cancelled).
        raise HTTPException(status_code=404, detail='Export file not found')
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Error getting export data: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.post('/delete')
async def request_data_deletion(
    request: Request,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Request deletion of user's personal data (GDPR - Right to be Forgotten)."""
    try:
        # Capture the caller's network metadata for the audit trail.
        client = request.client
        source_ip = client.host if client else None
        agent = request.headers.get('User-Agent')
        gdpr_request = await gdpr_service.create_data_deletion_request(
            db=db,
            user_id=current_user.id,
            ip_address=source_ip,
            user_agent=agent
        )
        payload = {
            'request_id': gdpr_request.id,
            'verification_token': gdpr_request.verification_token,
            'status': gdpr_request.status.value
        }
        return success_response(
            data=payload,
            message='Data deletion request created. Please verify via email to proceed.'
        )
    except Exception as e:
        logger.error(f'Error creating data deletion request: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.post('/delete/{request_id}/confirm')
async def confirm_data_deletion(
    request_id: int,
    verification_token: str = Query(...),
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Confirm and process data deletion request."""
    try:
        # Only a still-pending deletion request owned by the caller, with a
        # matching verification token, may be confirmed.
        criteria = (
            GDPRRequest.id == request_id,
            GDPRRequest.user_id == current_user.id,
            GDPRRequest.verification_token == verification_token,
            GDPRRequest.request_type == GDPRRequestType.data_deletion,
            GDPRRequest.status == GDPRRequestStatus.pending
        )
        base_query = db.query(GDPRRequest).options(
            noload(GDPRRequest.user),
            noload(GDPRRequest.processor)
        )
        gdpr_request = base_query.filter(*criteria).first()
        if gdpr_request is None:
            raise HTTPException(status_code=404, detail='Deletion request not found or already processed')
        # Perform the actual anonymization/deletion and capture its log.
        deletion_log = await gdpr_service.delete_user_data(
            db=db,
            user_id=current_user.id,
            request_id=request_id,
            processed_by=current_user.id
        )
        return success_response(
            data=deletion_log,
            message='Your data has been deleted successfully.'
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Error processing data deletion: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.get('/requests')
async def get_user_gdpr_requests(
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Get user's GDPR requests."""
    try:
        rows = (
            db.query(GDPRRequest)
            .options(noload(GDPRRequest.user), noload(GDPRRequest.processor))
            .filter(GDPRRequest.user_id == current_user.id)
            .order_by(GDPRRequest.created_at.desc())
            .all()
        )
        def _serialize(req):
            # Compact API view; timestamps as ISO-8601 strings or null.
            return {
                'id': req.id,
                'request_type': req.request_type.value,
                'status': req.status.value,
                'created_at': req.created_at.isoformat() if req.created_at else None,
                'processed_at': req.processed_at.isoformat() if req.processed_at else None,
            }
        return success_response(data={'requests': [_serialize(r) for r in rows]})
    except Exception as e:
        logger.error(f'Error getting GDPR requests: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.get('/admin/requests')
async def get_all_gdpr_requests(
    status: Optional[str] = Query(None),
    page: int = Query(1, ge=1),
    limit: int = Query(20, ge=1, le=100),
    current_user: User = Depends(authorize_roles('admin')),
    db: Session = Depends(get_db)
):
    """Get all GDPR requests (admin only)."""
    try:
        query = db.query(GDPRRequest).options(
            noload(GDPRRequest.user),
            noload(GDPRRequest.processor)
        )
        if status:
            # Reject unknown status filters with a 400 rather than a 500.
            try:
                query = query.filter(GDPRRequest.status == GDPRRequestStatus(status))
            except ValueError:
                raise HTTPException(status_code=400, detail=f'Invalid status: {status}')
        total = query.count()
        page_rows = (
            query.order_by(GDPRRequest.created_at.desc())
            .offset((page - 1) * limit)
            .limit(limit)
            .all()
        )
        serialized = [{
            'id': req.id,
            'request_type': req.request_type.value,
            'status': req.status.value,
            'user_email': req.user_email,
            'created_at': req.created_at.isoformat() if req.created_at else None,
            'processed_at': req.processed_at.isoformat() if req.processed_at else None,
        } for req in page_rows]
        return success_response(data={
            'requests': serialized,
            'pagination': {
                'page': page,
                'limit': limit,
                'total': total,
                'total_pages': (total + limit - 1) // limit
            }
        })
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Error getting GDPR requests: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.delete('/admin/requests/{request_id}')
async def delete_gdpr_request(
    request_id: int,
    current_user: User = Depends(authorize_roles('admin')),
    db: Session = Depends(get_db)
):
    """Delete a GDPR request (admin only)."""
    try:
        target = (
            db.query(GDPRRequest)
            .options(noload(GDPRRequest.user), noload(GDPRRequest.processor))
            .filter(GDPRRequest.id == request_id)
            .first()
        )
        if target is None:
            raise HTTPException(status_code=404, detail='GDPR request not found')
        db.delete(target)
        db.commit()
        logger.info(f'GDPR request {request_id} deleted by admin {current_user.id}')
        return success_response(message='GDPR request deleted successfully')
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Error deleting GDPR request: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))

View File

@@ -0,0 +1,295 @@
"""
GDPR compliance service for data export and deletion.
"""
from sqlalchemy.orm import Session
from typing import Dict, Any, Optional, List
from datetime import datetime, timedelta
import json
import secrets
import os
from pathlib import Path
from ..models.gdpr_request import GDPRRequest, GDPRRequestType, GDPRRequestStatus
from ...auth.models.user import User
from ...bookings.models.booking import Booking
from ...payments.models.payment import Payment
from ...payments.models.invoice import Invoice
from ...reviews.models.review import Review
from ...shared.config.logging_config import get_logger
from ...shared.config.settings import settings
from ...analytics.services.audit_service import audit_service
logger = get_logger(__name__)
class GDPRService:
    """Service for GDPR compliance operations.

    Backs the /gdpr routes with the data-subject rights workflows: creating
    export/deletion requests, exporting a user's data to JSON, and
    anonymizing/deleting a user's data ("right to be forgotten"). All
    methods are stateless staticmethods operating on the caller's session.
    """
    EXPORT_EXPIRY_DAYS = 7 # Export links expire after 7 days
    @staticmethod
    async def create_data_export_request(
        db: Session,
        user_id: int,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None
    ) -> GDPRRequest:
        """Create a data export request.

        Generates a one-time verification token, stamps an expiry of
        now + EXPORT_EXPIRY_DAYS, persists the request, and writes an
        audit-log entry.

        Raises:
            ValueError: if no user with ``user_id`` exists.
        """
        user = db.query(User).filter(User.id == user_id).first()
        if not user:
            raise ValueError('User not found')
        verification_token = secrets.token_urlsafe(32)
        expires_at = datetime.utcnow() + timedelta(days=GDPRService.EXPORT_EXPIRY_DAYS)
        gdpr_request = GDPRRequest(
            request_type=GDPRRequestType.data_export,
            status=GDPRRequestStatus.pending,
            user_id=user_id,
            user_email=user.email,
            verification_token=verification_token,
            ip_address=ip_address,
            user_agent=user_agent,
            expires_at=expires_at
        )
        db.add(gdpr_request)
        db.commit()
        db.refresh(gdpr_request)
        # Log GDPR request
        await audit_service.log_action(
            db=db,
            action='gdpr_export_requested',
            resource_type='gdpr_request',
            user_id=user_id,
            resource_id=gdpr_request.id,
            ip_address=ip_address,
            user_agent=user_agent,
            details={'request_type': 'data_export'},
            status='success'
        )
        logger.info(f'GDPR export request created: {gdpr_request.id} for user {user_id}')
        return gdpr_request
    @staticmethod
    async def create_data_deletion_request(
        db: Session,
        user_id: int,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None
    ) -> GDPRRequest:
        """Create a data deletion request (right to be forgotten).

        Unlike export requests, no expiry is stamped; the request remains
        pending until confirmed with the verification token.

        Raises:
            ValueError: if no user with ``user_id`` exists.
        """
        user = db.query(User).filter(User.id == user_id).first()
        if not user:
            raise ValueError('User not found')
        verification_token = secrets.token_urlsafe(32)
        gdpr_request = GDPRRequest(
            request_type=GDPRRequestType.data_deletion,
            status=GDPRRequestStatus.pending,
            user_id=user_id,
            user_email=user.email,
            verification_token=verification_token,
            ip_address=ip_address,
            user_agent=user_agent
        )
        db.add(gdpr_request)
        db.commit()
        db.refresh(gdpr_request)
        # Log GDPR request
        await audit_service.log_action(
            db=db,
            action='gdpr_deletion_requested',
            resource_type='gdpr_request',
            user_id=user_id,
            resource_id=gdpr_request.id,
            ip_address=ip_address,
            user_agent=user_agent,
            details={'request_type': 'data_deletion'},
            status='success'
        )
        logger.info(f'GDPR deletion request created: {gdpr_request.id} for user {user_id}')
        return gdpr_request
    @staticmethod
    async def export_user_data(
        db: Session,
        user_id: int,
        request_id: Optional[int] = None
    ) -> Dict[str, Any]:
        """Export all user data in JSON format.

        Collects profile, bookings, payments, invoices and reviews into one
        dict. When ``request_id`` is given, the dict is also written to
        UPLOAD_DIR/gdpr_exports and the request is marked completed.

        Raises:
            ValueError: if no user with ``user_id`` exists.
        """
        user = db.query(User).filter(User.id == user_id).first()
        if not user:
            raise ValueError('User not found')
        # Collect all user data
        export_data = {
            'user': {
                'id': user.id,
                'email': user.email,
                'full_name': user.full_name,
                'phone': user.phone,
                'address': user.address,
                'currency': getattr(user, 'currency', None),  # tolerates models without a currency attribute
                'created_at': user.created_at.isoformat() if user.created_at else None,
                'updated_at': user.updated_at.isoformat() if user.updated_at else None,
            },
            'bookings': [],
            'payments': [],
            'invoices': [],
            'reviews': [],
            'export_date': datetime.utcnow().isoformat()
        }
        # Get bookings
        bookings = db.query(Booking).filter(Booking.user_id == user_id).all()
        for booking in bookings:
            export_data['bookings'].append({
                'id': booking.id,
                'booking_number': booking.booking_number,
                'check_in_date': booking.check_in_date.isoformat() if booking.check_in_date else None,
                'check_out_date': booking.check_out_date.isoformat() if booking.check_out_date else None,
                'status': booking.status.value if hasattr(booking.status, 'value') else str(booking.status),
                'total_price': float(booking.total_price) if booking.total_price else None,
                'created_at': booking.created_at.isoformat() if booking.created_at else None,
            })
        # Get payments
        payments = db.query(Payment).filter(Payment.user_id == user_id).all()
        for payment in payments:
            export_data['payments'].append({
                'id': payment.id,
                'amount': float(payment.amount) if payment.amount else None,
                'payment_method': payment.payment_method.value if hasattr(payment.payment_method, 'value') else str(payment.payment_method),
                'payment_status': payment.payment_status.value if hasattr(payment.payment_status, 'value') else str(payment.payment_status),
                'payment_date': payment.payment_date.isoformat() if payment.payment_date else None,
                'created_at': payment.created_at.isoformat() if payment.created_at else None,
            })
        # Get invoices
        invoices = db.query(Invoice).filter(Invoice.user_id == user_id).all()
        for invoice in invoices:
            export_data['invoices'].append({
                'id': invoice.id,
                'invoice_number': invoice.invoice_number,
                'total_amount': float(invoice.total_amount) if invoice.total_amount else None,
                'status': invoice.status.value if hasattr(invoice.status, 'value') else str(invoice.status),
                'issue_date': invoice.issue_date.isoformat() if invoice.issue_date else None,
            })
        # Get reviews
        reviews = db.query(Review).filter(Review.user_id == user_id).all()
        for review in reviews:
            export_data['reviews'].append({
                'id': review.id,
                'rating': review.rating,
                'comment': review.comment,
                'created_at': review.created_at.isoformat() if review.created_at else None,
            })
        # Save export file
        if request_id:
            export_dir = Path(settings.UPLOAD_DIR) / 'gdpr_exports'
            export_dir.mkdir(parents=True, exist_ok=True)
            filename = f'user_{user_id}_export_{datetime.utcnow().strftime("%Y%m%d_%H%M%S")}.json'
            file_path = export_dir / filename
            with open(file_path, 'w', encoding='utf-8') as f:
                json.dump(export_data, f, indent=2, ensure_ascii=False)
            # Update GDPR request
            gdpr_request = db.query(GDPRRequest).filter(GDPRRequest.id == request_id).first()
            if gdpr_request:
                gdpr_request.export_file_path = str(file_path)
                gdpr_request.status = GDPRRequestStatus.completed
                gdpr_request.processed_at = datetime.utcnow()
                db.commit()
        return export_data
    @staticmethod
    async def delete_user_data(
        db: Session,
        user_id: int,
        request_id: Optional[int] = None,
        processed_by: Optional[int] = None
    ) -> Dict[str, Any]:
        """Delete all user data (right to be forgotten).

        Business records (bookings, payments, invoices) are anonymized
        rather than removed; reviews are deleted outright; the account is
        deactivated and its identifying fields scrubbed. Returns a log of
        everything touched, which is also stored on the GDPRRequest.

        Raises:
            ValueError: if no user with ``user_id`` exists.
        """
        user = db.query(User).filter(User.id == user_id).first()
        if not user:
            raise ValueError('User not found')
        deletion_log = {
            'user_id': user_id,
            'user_email': user.email,
            'deleted_at': datetime.utcnow().isoformat(),
            'deleted_items': []
        }
        # Anonymize bookings (keep for business records but remove personal data)
        bookings = db.query(Booking).filter(Booking.user_id == user_id).all()
        for booking in bookings:
            # Keep booking but anonymize
            # NOTE(review): assumes Booking.user_id is a nullable FK — confirm against the model.
            booking.user_id = None # Or set to a system user
            deletion_log['deleted_items'].append(f'booking_{booking.id}_anonymized')
        # Anonymize payments
        payments = db.query(Payment).filter(Payment.user_id == user_id).all()
        for payment in payments:
            payment.user_id = None
            deletion_log['deleted_items'].append(f'payment_{payment.id}_anonymized')
        # Anonymize invoices
        invoices = db.query(Invoice).filter(Invoice.user_id == user_id).all()
        for invoice in invoices:
            invoice.user_id = None
            invoice.customer_name = 'Deleted User'
            invoice.customer_email = 'deleted@example.com'
            deletion_log['deleted_items'].append(f'invoice_{invoice.id}_anonymized')
        # Delete reviews
        reviews = db.query(Review).filter(Review.user_id == user_id).all()
        for review in reviews:
            db.delete(review)
            deletion_log['deleted_items'].append(f'review_{review.id}_deleted')
        # Deactivate user account
        user.is_active = False
        user.email = f'deleted_{user.id}@deleted.local'
        user.full_name = 'Deleted User'
        user.phone = None
        user.address = None
        db.commit()
        # Update GDPR request
        if request_id:
            gdpr_request = db.query(GDPRRequest).filter(GDPRRequest.id == request_id).first()
            if gdpr_request:
                gdpr_request.status = GDPRRequestStatus.completed
                gdpr_request.processed_by = processed_by
                gdpr_request.processed_at = datetime.utcnow()
                gdpr_request.deletion_log = deletion_log
                db.commit()
        # Log deletion
        await audit_service.log_action(
            db=db,
            action='gdpr_data_deleted',
            resource_type='gdpr_request',
            user_id=processed_by,
            resource_id=request_id,
            details=deletion_log,
            status='success'
        )
        logger.info(f'User data deleted for user {user_id}')
        return deletion_log
# Module-level singleton used by the GDPR routes.
gdpr_service = GDPRService()

View File

@@ -0,0 +1,4 @@
"""
Integration module for webhooks and API keys.
"""

View File

@@ -0,0 +1,48 @@
"""
API key models for third-party access.
"""
from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Boolean, JSON
from sqlalchemy.orm import relationship
from datetime import datetime, timedelta
from ...shared.config.database import Base
class APIKey(Base):
    """A hashed API key granting scoped third-party access.

    Only a SHA-256 hash of the key is persisted; the plaintext is shown
    once at creation. ``key_prefix`` keeps the leading characters so a key
    can be identified without exposing it.
    """
    __tablename__ = 'api_keys'
    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
    name = Column(String(255), nullable=False)
    key_hash = Column(String(255), unique=True, nullable=False, index=True) # Hashed API key
    key_prefix = Column(String(20), nullable=False, index=True) # First 8 chars for identification
    # Permissions
    scopes = Column(JSON, nullable=False) # List of allowed scopes/permissions
    rate_limit = Column(Integer, default=100, nullable=False) # Requests per minute
    # Status
    is_active = Column(Boolean, default=True, nullable=False, index=True)
    # Expiration
    expires_at = Column(DateTime, nullable=True, index=True)  # None means the key never expires (see is_expired)
    # Metadata
    description = Column(Text, nullable=True)
    last_used_at = Column(DateTime, nullable=True)
    created_by = Column(Integer, ForeignKey('users.id'), nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
    # Relationships
    creator = relationship('User', foreign_keys=[created_by])
    @property
    def is_expired(self) -> bool:
        """Check if API key is expired (no expires_at means never expires)."""
        if not self.expires_at:
            return False
        return datetime.utcnow() > self.expires_at
    @property
    def is_valid(self) -> bool:
        """Check if API key is valid (active and not expired)."""
        return self.is_active and not self.is_expired

View File

@@ -0,0 +1,84 @@
"""
Webhook models for external integrations.
"""
from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Boolean, Enum, JSON
from sqlalchemy.orm import relationship
from datetime import datetime
import enum
from ...shared.config.database import Base
class WebhookEventType(str, enum.Enum):
    """Event names a webhook may subscribe to.

    Values use dotted "resource.action" form and are stored verbatim in
    webhook subscriptions and delivery records.
    """
    booking_created = "booking.created"
    booking_updated = "booking.updated"
    booking_cancelled = "booking.cancelled"
    payment_completed = "payment.completed"
    payment_failed = "payment.failed"
    invoice_created = "invoice.created"
    invoice_paid = "invoice.paid"
    user_created = "user.created"
    user_updated = "user.updated"
class WebhookStatus(str, enum.Enum):
    """Lifecycle state of a webhook subscription."""
    active = "active"
    inactive = "inactive"
    paused = "paused"
class WebhookDeliveryStatus(str, enum.Enum):
    """Outcome state of a single webhook delivery."""
    pending = "pending"
    success = "success"
    failed = "failed"
    retrying = "retrying"
class Webhook(Base):
    """An outbound webhook endpoint subscribed to a set of event types."""
    __tablename__ = 'webhooks'
    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
    name = Column(String(255), nullable=False)
    url = Column(String(500), nullable=False)
    secret = Column(String(255), nullable=False) # For signature verification
    # Event subscriptions
    events = Column(JSON, nullable=False) # List of event types
    # Status
    status = Column(Enum(WebhookStatus), default=WebhookStatus.active, nullable=False, index=True)
    # Configuration
    retry_count = Column(Integer, default=3, nullable=False)  # presumably max delivery retries — confirm in webhook_service
    timeout_seconds = Column(Integer, default=30, nullable=False)  # presumably per-delivery HTTP timeout — confirm in webhook_service
    # Metadata
    description = Column(Text, nullable=True)
    created_by = Column(Integer, ForeignKey('users.id'), nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
    # Relationships
    creator = relationship('User', foreign_keys=[created_by])
class WebhookDelivery(Base):
    """Record of one event delivery (and its retries) to a webhook endpoint."""
    __tablename__ = 'webhook_deliveries'
    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
    webhook_id = Column(Integer, ForeignKey('webhooks.id'), nullable=False, index=True)
    event_type = Column(String(100), nullable=False, index=True)
    event_id = Column(String(255), nullable=False, index=True)
    # Delivery details
    status = Column(Enum(WebhookDeliveryStatus), default=WebhookDeliveryStatus.pending, nullable=False, index=True)
    payload = Column(JSON, nullable=False)
    response_status = Column(Integer, nullable=True)  # HTTP status returned by the endpoint, if any
    response_body = Column(Text, nullable=True)
    error_message = Column(Text, nullable=True)
    # Retry information
    attempt_count = Column(Integer, default=0, nullable=False)
    next_retry_at = Column(DateTime, nullable=True)
    # Timestamps
    delivered_at = Column(DateTime, nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)
    # Relationships
    webhook = relationship('Webhook', foreign_keys=[webhook_id])

View File

@@ -0,0 +1,4 @@
"""
Integration routes for webhooks and API keys.
"""

View File

@@ -0,0 +1,165 @@
"""
API key management routes.
"""
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import List, Optional
from datetime import datetime
from ...shared.config.database import get_db
from ...shared.config.logging_config import get_logger
from ...security.middleware.auth import get_current_user, authorize_roles
from ...auth.models.user import User
from ..services.api_key_service import api_key_service
from ..models.api_key import APIKey
from ...shared.utils.response_helpers import success_response
from pydantic import BaseModel
logger = get_logger(__name__)
router = APIRouter(prefix='/api-keys', tags=['api-keys'])
class CreateAPIKeyRequest(BaseModel):
    """Payload for creating an API key."""
    name: str
    scopes: List[str]
    description: Optional[str] = None
    rate_limit: int = 100  # requests per minute
    expires_at: Optional[str] = None  # ISO-8601 timestamp; trailing 'Z' accepted
class UpdateAPIKeyRequest(BaseModel):
    """Partial-update payload for an API key.

    NOTE(review): None fields are forwarded to the service as-is —
    presumably treated as "no change"; confirm in api_key_service.
    """
    name: Optional[str] = None
    scopes: Optional[List[str]] = None
    description: Optional[str] = None
    rate_limit: Optional[int] = None
    expires_at: Optional[str] = None  # ISO-8601 timestamp; trailing 'Z' accepted
@router.post('/')
async def create_api_key(
    key_data: CreateAPIKeyRequest,
    current_user: User = Depends(authorize_roles('admin')),
    db: Session = Depends(get_db)
):
    """Create a new API key."""
    try:
        expires_at = None
        if key_data.expires_at:
            # Normalize a trailing 'Z' to an explicit UTC offset for fromisoformat.
            expires_at = datetime.fromisoformat(key_data.expires_at.replace('Z', '+00:00'))
        api_key, plain_key = api_key_service.create_api_key(
            db=db,
            name=key_data.name,
            scopes=key_data.scopes,
            created_by=current_user.id,
            description=key_data.description,
            rate_limit=key_data.rate_limit,
            expires_at=expires_at
        )
        key_view = {
            'id': api_key.id,
            'name': api_key.name,
            'key_prefix': api_key.key_prefix,
            'scopes': api_key.scopes,
            'rate_limit': api_key.rate_limit,
            'expires_at': api_key.expires_at.isoformat() if api_key.expires_at else None
        }
        return success_response(
            data={
                'api_key': key_view,
                'key': plain_key # Return plain key only on creation
            },
            message='API key created successfully. Save this key securely - it will not be shown again.'
        )
    except Exception as e:
        logger.error(f'Error creating API key: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.get('/')
async def get_api_keys(
    current_user: User = Depends(authorize_roles('admin')),
    db: Session = Depends(get_db)
):
    """Get all API keys."""
    try:
        def _view(k):
            # key_hash is never exposed; only the identifying prefix is returned.
            return {
                'id': k.id,
                'name': k.name,
                'key_prefix': k.key_prefix,
                'scopes': k.scopes,
                'rate_limit': k.rate_limit,
                'is_active': k.is_active,
                'last_used_at': k.last_used_at.isoformat() if k.last_used_at else None,
                'expires_at': k.expires_at.isoformat() if k.expires_at else None,
                'created_at': k.created_at.isoformat() if k.created_at else None
            }
        rows = db.query(APIKey).order_by(APIKey.created_at.desc()).all()
        return success_response(data={'api_keys': [_view(k) for k in rows]})
    except Exception as e:
        logger.error(f'Error getting API keys: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.put('/{key_id}')
async def update_api_key(
    key_id: int,
    key_data: UpdateAPIKeyRequest,
    current_user: User = Depends(authorize_roles('admin')),
    db: Session = Depends(get_db)
):
    """Update an API key."""
    try:
        new_expiry = None
        if key_data.expires_at:
            # Normalize a trailing 'Z' to an explicit UTC offset for fromisoformat.
            new_expiry = datetime.fromisoformat(key_data.expires_at.replace('Z', '+00:00'))
        api_key = api_key_service.update_api_key(
            db=db,
            key_id=key_id,
            name=key_data.name,
            scopes=key_data.scopes,
            description=key_data.description,
            rate_limit=key_data.rate_limit,
            expires_at=new_expiry
        )
        if api_key is None:
            raise HTTPException(status_code=404, detail='API key not found')
        key_view = {
            'id': api_key.id,
            'name': api_key.name,
            'key_prefix': api_key.key_prefix,
            'scopes': api_key.scopes,
            'rate_limit': api_key.rate_limit,
            'is_active': api_key.is_active,
            'expires_at': api_key.expires_at.isoformat() if api_key.expires_at else None,
            'created_at': api_key.created_at.isoformat() if api_key.created_at else None
        }
        return success_response(
            data={'api_key': key_view},
            message='API key updated successfully'
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Error updating API key: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.delete('/{key_id}')
async def revoke_api_key(
    key_id: int,
    current_user: User = Depends(authorize_roles('admin')),
    db: Session = Depends(get_db)
):
    """Revoke an API key."""
    try:
        # The service returns False when no key with this id exists.
        if not api_key_service.revoke_api_key(db=db, key_id=key_id):
            raise HTTPException(status_code=404, detail='API key not found')
        return success_response(message='API key revoked successfully')
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Error revoking API key: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))

View File

@@ -0,0 +1,199 @@
"""
Webhook management routes.
"""
from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.orm import Session
from typing import List, Optional
from ...shared.config.database import get_db
from ...shared.config.logging_config import get_logger
from ...security.middleware.auth import get_current_user, authorize_roles
from ...auth.models.user import User
from ..services.webhook_service import webhook_service
from ..models.webhook import Webhook, WebhookDelivery, WebhookEventType, WebhookStatus
from ...shared.utils.response_helpers import success_response
from pydantic import BaseModel, HttpUrl
logger = get_logger(__name__)
router = APIRouter(prefix='/webhooks', tags=['webhooks'])
class CreateWebhookRequest(BaseModel):
    """Payload for creating a webhook subscription."""
    name: str
    url: str  # NOTE(review): HttpUrl is imported but unused — consider using it here for URL validation
    events: List[str]
    description: Optional[str] = None
    retry_count: int = 3
    timeout_seconds: int = 30
@router.post('/')
async def create_webhook(
    webhook_data: CreateWebhookRequest,
    current_user: User = Depends(authorize_roles('admin')),
    db: Session = Depends(get_db)
):
    """Create a new webhook.

    Validates the requested event subscriptions against the known
    WebhookEventType values before persisting, so a typo in an event name
    fails fast with a 400 instead of being accepted and silently never firing.
    """
    try:
        # Fix: events were stored unvalidated; WebhookEventType was imported
        # but never used to check them.
        known_events = {e.value for e in WebhookEventType}
        invalid_events = [ev for ev in webhook_data.events if ev not in known_events]
        if invalid_events:
            raise HTTPException(
                status_code=400,
                detail=f'Invalid event types: {", ".join(invalid_events)}'
            )
        webhook = webhook_service.create_webhook(
            db=db,
            name=webhook_data.name,
            url=webhook_data.url,
            events=webhook_data.events,
            created_by=current_user.id,
            description=webhook_data.description,
            retry_count=webhook_data.retry_count,
            timeout_seconds=webhook_data.timeout_seconds
        )
        return success_response(
            data={'webhook': {
                'id': webhook.id,
                'name': webhook.name,
                'url': webhook.url,
                'events': webhook.events,
                'status': webhook.status.value,
                'secret': webhook.secret # Return secret only on creation
            }},
            message='Webhook created successfully'
        )
    except HTTPException:
        # Re-raise client errors (e.g. invalid events) instead of masking them as 500.
        raise
    except Exception as e:
        logger.error(f'Error creating webhook: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.get('/')
async def get_webhooks(
    current_user: User = Depends(authorize_roles('admin')),
    db: Session = Depends(get_db)
):
    """Get all webhooks."""
    try:
        rows = db.query(Webhook).order_by(Webhook.created_at.desc()).all()
        # The signing secret is not included in list responses.
        listing = [{
            'id': w.id,
            'name': w.name,
            'url': w.url,
            'events': w.events,
            'status': w.status.value,
            'created_at': w.created_at.isoformat() if w.created_at else None
        } for w in rows]
        return success_response(data={'webhooks': listing})
    except Exception as e:
        logger.error(f'Error getting webhooks: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.get('/{webhook_id}/deliveries')
async def get_webhook_deliveries(
    webhook_id: int,
    page: int = Query(1, ge=1),
    limit: int = Query(50, ge=1, le=100),
    current_user: User = Depends(authorize_roles('admin')),
    db: Session = Depends(get_db)
):
    """Get webhook delivery history."""
    try:
        base = db.query(WebhookDelivery).filter(WebhookDelivery.webhook_id == webhook_id)
        total = base.count()
        rows = (
            base.order_by(WebhookDelivery.created_at.desc())
            .offset((page - 1) * limit)
            .limit(limit)
            .all()
        )
        items = [{
            'id': d.id,
            'event_type': d.event_type,
            'status': d.status.value,
            'response_status': d.response_status,
            'attempt_count': d.attempt_count,
            'created_at': d.created_at.isoformat() if d.created_at else None,
            'delivered_at': d.delivered_at.isoformat() if d.delivered_at else None
        } for d in rows]
        return success_response(data={
            'deliveries': items,
            'pagination': {
                'page': page,
                'limit': limit,
                'total': total,
                'total_pages': (total + limit - 1) // limit
            }
        })
    except Exception as e:
        logger.error(f'Error getting webhook deliveries: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
class UpdateWebhookRequest(BaseModel):
    """Partial-update payload for a webhook.

    NOTE(review): None fields are forwarded to the service as-is —
    presumably treated as "no change"; confirm in webhook_service.
    """
    name: Optional[str] = None
    url: Optional[str] = None
    events: Optional[List[str]] = None
    description: Optional[str] = None
    status: Optional[str] = None  # must be a WebhookStatus value; validated in the route
    retry_count: Optional[int] = None
    timeout_seconds: Optional[int] = None
@router.put('/{webhook_id}')
async def update_webhook(
    webhook_id: int,
    webhook_data: UpdateWebhookRequest,
    current_user: User = Depends(authorize_roles('admin')),
    db: Session = Depends(get_db)
):
    """Update a webhook."""
    try:
        # Translate the status string into the enum up front; reject unknown
        # values with a 400 before touching the database.
        status_enum = None
        raw_status = webhook_data.status
        if raw_status:
            try:
                status_enum = WebhookStatus(raw_status)
            except ValueError:
                raise HTTPException(status_code=400, detail=f'Invalid status: {raw_status}')
        webhook = webhook_service.update_webhook(
            db=db,
            webhook_id=webhook_id,
            name=webhook_data.name,
            url=webhook_data.url,
            events=webhook_data.events,
            description=webhook_data.description,
            status=status_enum,
            retry_count=webhook_data.retry_count,
            timeout_seconds=webhook_data.timeout_seconds
        )
        if webhook is None:
            raise HTTPException(status_code=404, detail='Webhook not found')
        view = {
            'id': webhook.id,
            'name': webhook.name,
            'url': webhook.url,
            'events': webhook.events,
            'status': webhook.status.value,
            'created_at': webhook.created_at.isoformat() if webhook.created_at else None
        }
        return success_response(
            data={'webhook': view},
            message='Webhook updated successfully'
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Error updating webhook: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.delete('/{webhook_id}')
async def delete_webhook(
    webhook_id: int,
    current_user: User = Depends(authorize_roles('admin')),
    db: Session = Depends(get_db)
):
    """Delete a webhook."""
    try:
        # The service returns False when no webhook with this id exists.
        removed = webhook_service.delete_webhook(db=db, webhook_id=webhook_id)
        if not removed:
            raise HTTPException(status_code=404, detail='Webhook not found')
        return success_response(message='Webhook deleted successfully')
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Error deleting webhook: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))

View File

@@ -0,0 +1,133 @@
"""
API key service for third-party access management.
"""
from sqlalchemy.orm import Session
from typing import List, Optional
from datetime import datetime
import secrets
import hashlib
from ..models.api_key import APIKey
from ...shared.config.logging_config import get_logger
logger = get_logger(__name__)
class APIKeyService:
    """Service for managing API keys.

    Plain-text keys are shown to the caller exactly once at creation time;
    only the SHA-256 hash (plus a short display prefix) is persisted.
    """
    @staticmethod
    def generate_api_key() -> tuple[str, str, str]:
        """Generate a new API key and return (key, key_hash, key_prefix).

        FIX: the annotation previously said ``tuple[str, str]`` although three
        values are returned. ``key`` has the format ``hb_<urlsafe-token>``,
        ``key_hash`` is its SHA-256 hex digest, and ``key_prefix`` is the
        first 11 characters kept for identification in listings.
        """
        # Generate key in format: hb_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
        key = f"hb_{secrets.token_urlsafe(32)}"
        key_hash = hashlib.sha256(key.encode('utf-8')).hexdigest()
        key_prefix = key[:11]  # "hb_" + 8 chars
        return key, key_hash, key_prefix
    @staticmethod
    def create_api_key(
        db: Session,
        name: str,
        scopes: List[str],
        created_by: int,
        description: Optional[str] = None,
        rate_limit: int = 100,
        expires_at: Optional[datetime] = None
    ) -> tuple[APIKey, str]:
        """Create a new API key and return (api_key_model, plain_key).

        Only the hash and prefix are stored; the plain key is returned here
        once and cannot be recovered later.
        """
        key, key_hash, key_prefix = APIKeyService.generate_api_key()
        api_key = APIKey(
            name=name,
            key_hash=key_hash,
            key_prefix=key_prefix,
            scopes=scopes,
            rate_limit=rate_limit,
            created_by=created_by,
            description=description,
            expires_at=expires_at
        )
        db.add(api_key)
        db.commit()
        db.refresh(api_key)
        logger.info(f'API key created: {api_key.id} - {name}')
        return api_key, key  # Return plain key only once
    @staticmethod
    def verify_api_key(db: Session, api_key: str) -> Optional[APIKey]:
        """Verify an API key and return the APIKey model if valid.

        Looks the key up by SHA-256 hash among active rows and additionally
        requires the model's ``is_valid`` flag (defined on APIKey — presumably
        covers expiry; confirm against the model). Stamps ``last_used_at`` on
        success; returns None otherwise.
        """
        key_hash = hashlib.sha256(api_key.encode('utf-8')).hexdigest()
        api_key_model = db.query(APIKey).filter(
            APIKey.key_hash == key_hash,
            APIKey.is_active == True  # noqa: E712 — SQLAlchemy requires == for SQL comparison
        ).first()
        if api_key_model and api_key_model.is_valid:
            # Update last used timestamp
            api_key_model.last_used_at = datetime.utcnow()
            db.commit()
            return api_key_model
        return None
    @staticmethod
    def revoke_api_key(db: Session, key_id: int) -> bool:
        """Soft-revoke an API key (marks it inactive). Returns True if found."""
        api_key = db.query(APIKey).filter(APIKey.id == key_id).first()
        if api_key:
            api_key.is_active = False
            db.commit()
            logger.info(f'API key {key_id} revoked')
            return True
        return False
    @staticmethod
    def get_user_api_keys(
        db: Session,
        created_by: int,
        active_only: bool = True
    ) -> List[APIKey]:
        """Get all API keys created by a user, newest first.

        When ``active_only`` is True, revoked keys are filtered out.
        """
        query = db.query(APIKey).filter(APIKey.created_by == created_by)
        if active_only:
            query = query.filter(APIKey.is_active == True)  # noqa: E712
        return query.order_by(APIKey.created_at.desc()).all()
    @staticmethod
    def update_api_key(
        db: Session,
        key_id: int,
        name: Optional[str] = None,
        scopes: Optional[List[str]] = None,
        description: Optional[str] = None,
        rate_limit: Optional[int] = None,
        expires_at: Optional[datetime] = None
    ) -> Optional[APIKey]:
        """Update an API key's metadata; None fields are left unchanged.

        Returns the refreshed model, or None if the key does not exist.
        The key material itself (hash/prefix) is immutable.
        """
        api_key = db.query(APIKey).filter(APIKey.id == key_id).first()
        if not api_key:
            return None
        if name is not None:
            api_key.name = name
        if scopes is not None:
            api_key.scopes = scopes
        if description is not None:
            api_key.description = description
        if rate_limit is not None:
            api_key.rate_limit = rate_limit
        if expires_at is not None:
            api_key.expires_at = expires_at
        db.commit()
        db.refresh(api_key)
        logger.info(f'API key updated: {api_key.id} - {api_key.name}')
        return api_key
api_key_service = APIKeyService()

View File

@@ -0,0 +1,218 @@
"""
Webhook service for external integrations.
"""
from sqlalchemy.orm import Session
from typing import List, Dict, Any, Optional
from datetime import datetime, timedelta
import secrets
import httpx
import hmac
import hashlib
import json
from ..models.webhook import Webhook, WebhookDelivery, WebhookEventType, WebhookStatus, WebhookDeliveryStatus
from ...shared.config.logging_config import get_logger
from ...analytics.services.audit_service import audit_service
logger = get_logger(__name__)
class WebhookService:
    """Service for managing webhooks and delivering signed webhook events.

    Outgoing deliveries carry an HMAC-SHA256 signature (X-Webhook-Signature)
    computed over the JSON payload with the webhook's per-endpoint secret,
    so receivers can authenticate the sender.
    """
    @staticmethod
    def create_webhook(
        db: Session,
        name: str,
        url: str,
        events: List[str],
        created_by: int,
        description: Optional[str] = None,
        retry_count: int = 3,
        timeout_seconds: int = 30
    ) -> Webhook:
        """Create a new webhook with a freshly generated signing secret."""
        # Secret is generated server-side; it is used by generate_signature below.
        secret = secrets.token_urlsafe(32)
        webhook = Webhook(
            name=name,
            url=url,
            secret=secret,
            events=events,
            created_by=created_by,
            description=description,
            retry_count=retry_count,
            timeout_seconds=timeout_seconds
        )
        db.add(webhook)
        db.commit()
        db.refresh(webhook)
        logger.info(f'Webhook created: {webhook.id} - {name}')
        return webhook
    @staticmethod
    def update_webhook(
        db: Session,
        webhook_id: int,
        name: Optional[str] = None,
        url: Optional[str] = None,
        events: Optional[List[str]] = None,
        description: Optional[str] = None,
        status: Optional[WebhookStatus] = None,
        retry_count: Optional[int] = None,
        timeout_seconds: Optional[int] = None
    ) -> Optional[Webhook]:
        """Update a webhook; fields passed as None are left unchanged.

        Returns the refreshed webhook, or None if no row matches webhook_id.
        """
        webhook = db.query(Webhook).filter(Webhook.id == webhook_id).first()
        if not webhook:
            return None
        if name is not None:
            webhook.name = name
        if url is not None:
            webhook.url = url
        if events is not None:
            webhook.events = events
        if description is not None:
            webhook.description = description
        if status is not None:
            webhook.status = status
        if retry_count is not None:
            webhook.retry_count = retry_count
        if timeout_seconds is not None:
            webhook.timeout_seconds = timeout_seconds
        db.commit()
        db.refresh(webhook)
        logger.info(f'Webhook updated: {webhook.id} - {webhook.name}')
        return webhook
    @staticmethod
    def delete_webhook(db: Session, webhook_id: int) -> bool:
        """Hard-delete a webhook. Returns True if it existed and was removed."""
        webhook = db.query(Webhook).filter(Webhook.id == webhook_id).first()
        if not webhook:
            return False
        db.delete(webhook)
        db.commit()
        logger.info(f'Webhook deleted: {webhook_id}')
        return True
    @staticmethod
    def generate_signature(payload: str, secret: str) -> str:
        """Return the hex HMAC-SHA256 signature of *payload* keyed by *secret*."""
        return hmac.new(
            secret.encode('utf-8'),
            payload.encode('utf-8'),
            hashlib.sha256
        ).hexdigest()
    @staticmethod
    async def deliver_webhook(
        db: Session,
        event_type: str,
        event_id: str,
        payload: Dict[str, Any]
    ) -> List[WebhookDelivery]:
        """Deliver webhook event to all subscribed webhooks.

        Creates one WebhookDelivery row per matching active webhook, then
        attempts the HTTP send immediately. Send failures are recorded on the
        delivery row (and may be scheduled for retry) rather than raised, so
        one broken endpoint cannot block the others.
        """
        # Find active webhooks subscribed to this event
        # NOTE(review): JSON-column `contains` semantics vary by backend — confirm on the target DB.
        webhooks = db.query(Webhook).filter(
            Webhook.status == WebhookStatus.active,
            Webhook.events.contains([event_type])
        ).all()
        deliveries = []
        for webhook in webhooks:
            delivery = WebhookDelivery(
                webhook_id=webhook.id,
                event_type=event_type,
                event_id=event_id,
                payload=payload,
                status=WebhookDeliveryStatus.pending
            )
            db.add(delivery)
            db.commit()  # persist first so the delivery has an id for headers/logging
            db.refresh(delivery)
            # Deliver asynchronously
            try:
                await WebhookService._send_webhook(webhook, delivery, payload)
            except Exception as e:
                logger.error(f'Error delivering webhook {delivery.id}: {str(e)}')
            deliveries.append(delivery)
        return deliveries
    @staticmethod
    async def _send_webhook(
        webhook: Webhook,
        delivery: WebhookDelivery,
        payload: Dict[str, Any]
    ):
        """Send webhook HTTP request and record the outcome on *delivery*.

        2xx responses mark the delivery success; any other status or a raised
        exception marks it failed, with exponential-backoff retry scheduling
        (2**attempt minutes) while attempts remain. The delivery row is
        persisted via a dedicated session at the end.
        """
        payload_str = json.dumps(payload)
        signature = WebhookService.generate_signature(payload_str, webhook.secret)
        headers = {
            'Content-Type': 'application/json',
            'X-Webhook-Signature': signature,
            'X-Webhook-Event': delivery.event_type,
            'X-Webhook-Id': str(delivery.id)
        }
        try:
            async with httpx.AsyncClient(timeout=webhook.timeout_seconds) as client:
                response = await client.post(
                    webhook.url,
                    content=payload_str,
                    headers=headers
                )
                delivery.response_status = response.status_code
                delivery.response_body = response.text[:1000]  # Limit response body size
                delivery.attempt_count += 1
                if response.status_code >= 200 and response.status_code < 300:
                    delivery.status = WebhookDeliveryStatus.success
                    delivery.delivered_at = datetime.utcnow()
                else:
                    delivery.status = WebhookDeliveryStatus.failed
                    delivery.error_message = f'HTTP {response.status_code}'
                    # Schedule retry if attempts remaining
                    if delivery.attempt_count < webhook.retry_count:
                        delivery.status = WebhookDeliveryStatus.retrying
                        delivery.next_retry_at = datetime.utcnow() + timedelta(
                            minutes=2 ** delivery.attempt_count  # Exponential backoff
                        )
        except Exception as e:
            # Network/timeout/etc. failures: record a truncated error message.
            delivery.status = WebhookDeliveryStatus.failed
            delivery.error_message = str(e)[:500]
            delivery.attempt_count += 1
            # Schedule retry if attempts remaining
            if delivery.attempt_count < webhook.retry_count:
                delivery.status = WebhookDeliveryStatus.retrying
                delivery.next_retry_at = datetime.utcnow() + timedelta(
                    minutes=2 ** delivery.attempt_count
                )
        # Update delivery in database
        # NOTE(review): uses a fresh session + merge because the caller's session
        # may not be safe to reuse here — confirm this is the intended pattern.
        from ...shared.config.database import SessionLocal
        db = SessionLocal()
        try:
            db.merge(delivery)
            db.commit()
        except Exception as e:
            logger.error(f'Error updating webhook delivery: {str(e)}')
            db.rollback()
        finally:
            db.close()
webhook_service = WebhookService()

View File

@@ -49,6 +49,9 @@ from .content.routes import privacy_routes
app = FastAPI(title=settings.APP_NAME, description='Enterprise-grade Hotel Booking API', version=settings.APP_VERSION, docs_url='/api/docs' if not settings.is_production else None, redoc_url='/api/redoc' if not settings.is_production else None, openapi_url='/api/openapi.json' if not settings.is_production else None)
app.add_middleware(RequestIDMiddleware)
app.add_middleware(CookieConsentMiddleware)
# Add API versioning middleware
from .shared.middleware.api_versioning import APIVersioningMiddleware
app.add_middleware(APIVersioningMiddleware, default_version='v1')
if settings.REQUEST_TIMEOUT > 0:
app.add_middleware(TimeoutMiddleware)
app.add_middleware(SecurityHeadersMiddleware)
@@ -61,10 +64,12 @@ if settings.IP_WHITELIST_ENABLED:
app.add_middleware(AdminIPWhitelistMiddleware)
logger.info(f'Admin IP whitelisting enabled with {len(settings.ADMIN_IP_WHITELIST)} IP(s)/CIDR range(s)')
if settings.RATE_LIMIT_ENABLED:
limiter = Limiter(key_func=get_remote_address, default_limits=[f'{settings.RATE_LIMIT_PER_MINUTE}/minute'])
# Use role-based rate limiting
from .security.middleware.role_based_rate_limit import create_role_based_limiter
limiter = create_role_based_limiter()
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
logger.info(f'Rate limiting enabled: {settings.RATE_LIMIT_PER_MINUTE} requests/minute')
logger.info(f'Role-based rate limiting enabled: Admin={settings.RATE_LIMIT_ADMIN_PER_MINUTE}/min, Staff={settings.RATE_LIMIT_STAFF_PER_MINUTE}/min, Accountant={settings.RATE_LIMIT_ACCOUNTANT_PER_MINUTE}/min, Customer={settings.RATE_LIMIT_CUSTOMER_PER_MINUTE}/min, Default={settings.RATE_LIMIT_PER_MINUTE}/min')
# CORS middleware must be added LAST to handle OPTIONS preflight requests before other middleware
# In FastAPI/Starlette, middleware is executed in reverse order (last added = first executed = outermost)
@@ -211,8 +216,11 @@ from .guest_management.routes.complaint_routes import router as complaint_routes
from .notifications.routes import chat_routes, notification_routes, email_campaign_routes
from .analytics.routes import analytics_routes, report_routes, audit_routes
from .security.routes import security_routes, compliance_routes
from .system.routes import system_settings_routes, workflow_routes, task_routes
from .system.routes import system_settings_routes, workflow_routes, task_routes, approval_routes, backup_routes
from .ai.routes import ai_assistant_routes
from .compliance.routes import gdpr_routes
from .integrations.routes import webhook_routes, api_key_routes
from .auth.routes import session_routes
# Register all routes with /api prefix (removed duplicate registrations)
# Using /api prefix as standard, API versioning can be handled via headers if needed
@@ -264,6 +272,12 @@ app.include_router(email_campaign_routes.router, prefix=api_prefix)
app.include_router(page_content_routes.router, prefix=api_prefix)
app.include_router(blog_routes.router, prefix=api_prefix)
app.include_router(ai_assistant_routes.router, prefix=api_prefix)
app.include_router(approval_routes.router, prefix=api_prefix)
app.include_router(gdpr_routes.router, prefix=api_prefix)
app.include_router(webhook_routes.router, prefix=api_prefix)
app.include_router(api_key_routes.router, prefix=api_prefix)
app.include_router(session_routes.router, prefix=api_prefix)
app.include_router(backup_routes.router, prefix=api_prefix)
logger.info('All routes registered successfully')
def ensure_jwt_secret():

View File

@@ -98,7 +98,7 @@ async def create_invoice(request: Request, invoice_data: CreateInvoiceRequest, c
raise HTTPException(status_code=500, detail=str(e))
@router.put('/{id}')
async def update_invoice(request: Request, id: int, invoice_data: UpdateInvoiceRequest, current_user: User=Depends(authorize_roles('admin', 'staff', 'accountant')), db: Session=Depends(get_db)):
async def update_invoice(request: Request, id: int, invoice_data: UpdateInvoiceRequest, current_user: User=Depends(authorize_roles('admin', 'accountant')), db: Session=Depends(get_db)):
try:
invoice = db.query(Invoice).filter(Invoice.id == id).first()
if not invoice:
@@ -119,7 +119,7 @@ async def update_invoice(request: Request, id: int, invoice_data: UpdateInvoiceR
raise HTTPException(status_code=500, detail=str(e))
@router.post('/{id}/mark-paid')
async def mark_invoice_as_paid(request: Request, id: int, payment_data: MarkInvoicePaidRequest, current_user: User=Depends(authorize_roles('admin', 'staff', 'accountant')), db: Session=Depends(get_db)):
async def mark_invoice_as_paid(request: Request, id: int, payment_data: MarkInvoicePaidRequest, current_user: User=Depends(authorize_roles('admin', 'accountant')), db: Session=Depends(get_db)):
try:
request_id = get_request_id(request)
amount = payment_data.amount

View File

@@ -3,6 +3,7 @@ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from jose import JWTError, jwt
from sqlalchemy.orm import Session
from typing import Optional
from datetime import datetime
import os
from ...shared.config.database import get_db
from ...shared.config.settings import settings
@@ -15,26 +16,39 @@ def get_jwt_secret() -> str:
Get JWT secret securely, fail if not configured.
Never use hardcoded fallback secrets.
"""
default_secret = 'dev-secret-key-change-in-production-12345'
# Remove default secret entirely - fail fast if not configured
jwt_secret = getattr(settings, 'JWT_SECRET', None) or os.getenv('JWT_SECRET', None)
# Fail fast if secret is not configured or using default value
if not jwt_secret or jwt_secret == default_secret:
if settings.is_production:
raise ValueError(
'CRITICAL: JWT_SECRET is not properly configured in production. '
'Please set JWT_SECRET environment variable to a secure random string.'
)
# In development, warn but allow (startup validation should catch this)
import warnings
warnings.warn(
f'JWT_SECRET not configured. Using settings value but this is insecure. '
f'Set JWT_SECRET environment variable.',
UserWarning
# Fail fast if secret is not configured
if not jwt_secret:
error_msg = (
'CRITICAL: JWT_SECRET is not configured. '
'Please set JWT_SECRET environment variable to a secure random string (minimum 32 characters).'
)
jwt_secret = getattr(settings, 'JWT_SECRET', None)
if not jwt_secret:
raise ValueError('JWT_SECRET must be configured')
import logging
logger = logging.getLogger(__name__)
logger.error(error_msg)
if settings.is_production:
raise ValueError(error_msg)
else:
# In development, generate a secure secret but warn
import secrets
jwt_secret = secrets.token_urlsafe(64)
logger.warning(
f'JWT_SECRET not configured. Auto-generated secret for development. '
f'Set JWT_SECRET environment variable for production: {jwt_secret}'
)
# Validate JWT secret strength
if len(jwt_secret) < 32:
error_msg = 'JWT_SECRET must be at least 32 characters long for security.'
import logging
logger = logging.getLogger(__name__)
logger.error(error_msg)
if settings.is_production:
raise ValueError(error_msg)
else:
logger.warning(error_msg)
return jwt_secret
@@ -80,6 +94,22 @@ def get_current_user(
user = db.query(User).filter(User.id == user_id).first()
if user is None:
raise credentials_exception
# Check if user account is active
if not user.is_active:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail='Account is disabled. Please contact support.'
)
# Check if account is locked
if user.locked_until and user.locked_until > datetime.utcnow():
remaining_minutes = int((user.locked_until - datetime.utcnow()).total_seconds() / 60)
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=f'Account is temporarily locked due to multiple failed login attempts. Please try again in {remaining_minutes} minute(s).'
)
return user
def authorize_roles(*allowed_roles: str):
@@ -102,7 +132,8 @@ def get_current_user_optional(
) -> Optional[User]:
"""
Get current user optionally from either Authorization header or httpOnly cookie.
Returns None if no valid token is found.
Returns None if no valid token is found, or if user is inactive/locked.
This ensures inactive/locked users are never considered "authenticated" even for optional features.
"""
# Try to get token from Authorization header first
token = None
@@ -124,7 +155,19 @@ def get_current_user_optional(
return None
except (JWTError, ValueError):
return None
user = db.query(User).filter(User.id == user_id).first()
if user is None:
return None
# Check if user account is active - return None for inactive users
if not user.is_active:
return None
# Check if account is locked - return None for locked users
if user.locked_until and user.locked_until > datetime.utcnow():
return None
return user
def verify_token(token: str) -> dict:

View File

@@ -0,0 +1,47 @@
"""
Role-based rate limiting middleware.
"""
from slowapi import Limiter
from slowapi.util import get_remote_address
from typing import Callable
from fastapi import Request
from ...shared.config.settings import settings
from ...shared.config.logging_config import get_logger
logger = get_logger(__name__)
def get_rate_limit_key(request: Request) -> str:
    """Compute the rate-limit bucket key.

    Authenticated requests (user attached to request.state by auth middleware)
    are bucketed per role and user id; everything else falls back to client IP.
    """
    user = getattr(request.state, 'user', None)
    if user:
        role = getattr(user, 'role', None)
        if role:
            role_name = getattr(role, 'name', None)
            if role_name:
                # Role-qualified, per-user bucket.
                return f"rate_limit:{role_name}:{user.id}"
    # Anonymous (or role-less) traffic is limited per client IP.
    return get_remote_address(request)
def get_rate_limit_for_role(role_name: str) -> str:
    """Return the slowapi limit string (e.g. "200/minute") configured for *role_name*.

    Unknown roles fall back to the global per-IP default.
    """
    per_role = {
        'admin': settings.RATE_LIMIT_ADMIN_PER_MINUTE,
        'staff': settings.RATE_LIMIT_STAFF_PER_MINUTE,
        'accountant': settings.RATE_LIMIT_ACCOUNTANT_PER_MINUTE,
        'customer': settings.RATE_LIMIT_CUSTOMER_PER_MINUTE,
    }
    per_minute = per_role.get(role_name, settings.RATE_LIMIT_PER_MINUTE)
    return f"{per_minute}/minute"
def create_role_based_limiter() -> Limiter:
    """Build a Limiter keyed by authenticated role/user (IP fallback)."""
    # The default limit applies whenever no per-route/per-role override exists.
    default_limit = f'{settings.RATE_LIMIT_PER_MINUTE}/minute'
    return Limiter(key_func=get_rate_limit_key, default_limits=[default_limit])

View File

@@ -28,6 +28,10 @@ class Settings(BaseSettings):
CORS_ORIGINS: List[str] = Field(default_factory=lambda: ['http://localhost:5173', 'http://localhost:3000', 'http://127.0.0.1:5173'], description='Allowed CORS origins')
RATE_LIMIT_ENABLED: bool = Field(default=True, description='Enable rate limiting')
RATE_LIMIT_PER_MINUTE: int = Field(default=60, description='Requests per minute per IP')
RATE_LIMIT_ADMIN_PER_MINUTE: int = Field(default=300, description='Requests per minute for admin users')
RATE_LIMIT_STAFF_PER_MINUTE: int = Field(default=200, description='Requests per minute for staff users')
RATE_LIMIT_ACCOUNTANT_PER_MINUTE: int = Field(default=200, description='Requests per minute for accountant users')
RATE_LIMIT_CUSTOMER_PER_MINUTE: int = Field(default=100, description='Requests per minute for customer users')
CSRF_PROTECTION_ENABLED: bool = Field(default=True, description='Enable CSRF protection')
HSTS_PRELOAD_ENABLED: bool = Field(default=False, description='Enable HSTS preload directive (requires domain submission to hstspreload.org)')
LOG_LEVEL: str = Field(default='INFO', description='Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL')

View File

@@ -0,0 +1,51 @@
"""
API versioning middleware for backward compatibility.
"""
from fastapi import Request, HTTPException, status
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import Response
from typing import Callable
import re
from ...shared.config.logging_config import get_logger
logger = get_logger(__name__)
class APIVersioningMiddleware(BaseHTTPMiddleware):
    """Middleware that resolves an API version per request.

    Paths like ``/api/v2/...`` are rewritten to ``/api/...`` for routing while
    the requested version is remembered on ``request.state.api_version`` and
    echoed back in the ``X-API-Version`` response header.
    """
    def __init__(self, app, default_version: str = "v1"):
        super().__init__(app)
        self.default_version = default_version
        # Matches the numeric version segment of /api/vN/ paths.
        self.version_pattern = re.compile(r'/api/v(\d+)/')
    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        """Resolve the version, rewrite the routing path, and tag the response."""
        path = request.url.path
        if path.startswith('/api'):
            match = self.version_pattern.search(path)
            if match:
                # Explicit version in the URL: record it and route as unversioned.
                request.state.api_version = f"v{match.group(1)}"
                request.scope['path'] = self.version_pattern.sub('/api/', path)
            elif path.startswith('/api/') and not path.startswith('/api/v'):
                # No version supplied — assume the configured default.
                request.state.api_version = self.default_version
            else:
                # Bare '/api' or a '/api/vN' path without the trailing slash.
                request.state.api_version = None
        response = await call_next(request)
        resolved = getattr(request.state, 'api_version', None)
        if resolved:
            response.headers['X-API-Version'] = resolved
        return response

View File

@@ -0,0 +1,111 @@
"""
Decorator for automatic audit logging of financial operations.
"""
from functools import wraps
from typing import Callable, Any
from fastapi import Request
from sqlalchemy.orm import Session
from ...analytics.services.audit_service import audit_service
from ...shared.config.logging_config import get_logger
logger = get_logger(__name__)
def audit_financial_operation(action: str, resource_type: str):
    """
    Decorator to automatically log financial operations to audit trail.

    Wraps an async route handler; after it runs, an audit record is written
    with the given action/resource_type, the caller's identity, client IP,
    user agent and request id. Both successful and failed executions are
    logged (failures re-raise after logging). Logging only happens when both
    a db session and a current user could be discovered in the arguments.

    Usage:
        @audit_financial_operation('payment_refunded', 'payment')
        async def refund_payment(...):
            ...
    """
    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            # Extract request and db from function arguments
            request: Request = None
            db: Session = None
            current_user = None
            # Find request and db in kwargs or args.
            # NOTE(review): the user is detected by duck-typing (has .id and .email),
            # so any other object with both attributes would be mistaken for the user.
            for arg in list(args) + list(kwargs.values()):
                if isinstance(arg, Request):
                    request = arg
                elif isinstance(arg, Session):
                    db = arg
                elif hasattr(arg, 'id') and hasattr(arg, 'email'):  # Likely a User object
                    current_user = arg
            # Get request from kwargs if not found
            if not request and 'request' in kwargs:
                request = kwargs['request']
            if not db and 'db' in kwargs:
                db = kwargs['db']
            if not current_user and 'current_user' in kwargs:
                current_user = kwargs['current_user']
            # Extract resource_id from function arguments if available
            # (positional fallback assumes the first positional arg is the id).
            resource_id = None
            if 'id' in kwargs:
                resource_id = kwargs['id']
            elif len(args) > 0 and isinstance(args[0], int):
                resource_id = args[0]
            # Get client info
            client_ip = None
            user_agent = None
            request_id = None
            if request:
                client_ip = request.client.host if request.client else None
                user_agent = request.headers.get('User-Agent')
                request_id = getattr(request.state, 'request_id', None)
            try:
                # Execute the function
                result = await func(*args, **kwargs)
                # Log successful operation
                if db and current_user:
                    await audit_service.log_action(
                        db=db,
                        action=action,
                        resource_type=resource_type,
                        user_id=current_user.id if current_user else None,
                        resource_id=resource_id,
                        ip_address=client_ip,
                        user_agent=user_agent,
                        request_id=request_id,
                        details={
                            'function': func.__name__,
                            'result': 'success'
                        },
                        status='success'
                    )
                return result
            except Exception as e:
                # Log failed operation, then re-raise so normal error handling applies.
                if db and current_user:
                    await audit_service.log_action(
                        db=db,
                        action=action,
                        resource_type=resource_type,
                        user_id=current_user.id if current_user else None,
                        resource_id=resource_id,
                        ip_address=client_ip,
                        user_agent=user_agent,
                        request_id=request_id,
                        details={
                            'function': func.__name__,
                            'result': 'failed',
                            'error': str(e)
                        },
                        status='failed',
                        error_message=str(e)
                    )
                raise
        return wrapper
    return decorator

View File

@@ -0,0 +1,59 @@
"""
Approval workflow models for enterprise financial operations.
"""
from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Enum, JSON
from sqlalchemy.orm import relationship
from datetime import datetime
import enum
from ...shared.config.database import Base
class ApprovalStatus(str, enum.Enum):
    """Lifecycle state of an ApprovalRequest."""
    pending = "pending"      # awaiting a decision
    approved = "approved"    # granted by an authorized approver
    rejected = "rejected"    # declined; rejection_reason records why
    cancelled = "cancelled"  # withdrawn before a decision was made
class ApprovalType(str, enum.Enum):
    """Category of operation that may require a second sign-off."""
    invoice_update = "invoice_update"
    payment_refund = "payment_refund"
    invoice_mark_paid = "invoice_mark_paid"
    financial_adjustment = "financial_adjustment"
    user_role_change = "user_role_change"
    large_transaction = "large_transaction"
class ApprovalRequest(Base):
    """A pending/decided approval for a sensitive operation on some resource.

    The target is referenced generically via (resource_type, resource_id);
    request_data holds the proposed change and current_data a snapshot of the
    state at request time, so reviewers can diff the two.
    """
    __tablename__ = 'approval_requests'
    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
    approval_type = Column(Enum(ApprovalType), nullable=False, index=True)
    status = Column(Enum(ApprovalStatus), default=ApprovalStatus.pending, nullable=False, index=True)
    # Who requested
    requested_by = Column(Integer, ForeignKey('users.id'), nullable=False, index=True)
    requested_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    # Who approved/rejected (approved_by is also set on rejection — the decider)
    approved_by = Column(Integer, ForeignKey('users.id'), nullable=True, index=True)
    approved_at = Column(DateTime, nullable=True)
    rejection_reason = Column(Text, nullable=True)
    # Resource being approved (generic reference, no FK)
    resource_type = Column(String(50), nullable=False, index=True)
    resource_id = Column(Integer, nullable=False, index=True)
    # Request details
    request_data = Column(JSON, nullable=True)
    current_data = Column(JSON, nullable=True)  # Snapshot of current state
    # Metadata
    priority = Column(String(20), default='normal')  # low, normal, high, urgent
    notes = Column(Text, nullable=True)
    extra_metadata = Column(JSON, nullable=True)  # Renamed from 'metadata' as it's reserved in SQLAlchemy
    # Relationships (two FKs to users, so foreign_keys must be explicit)
    requester = relationship('User', foreign_keys=[requested_by])
    approver = relationship('User', foreign_keys=[approved_by])
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

View File

@@ -0,0 +1,200 @@
"""
Approval workflow routes.
"""
from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy.orm import Session
from typing import Optional
from ...shared.config.database import get_db
from ...shared.config.logging_config import get_logger
from ...security.middleware.auth import get_current_user, authorize_roles
from ...auth.models.user import User
from ...system.services.approval_service import approval_service
from ...system.models.approval_workflow import ApprovalRequest, ApprovalStatus, ApprovalType
from ...shared.utils.response_helpers import success_response
logger = get_logger(__name__)
router = APIRouter(prefix='/approvals', tags=['approvals'])
@router.get('/pending')
async def get_pending_approvals(
    approval_type: Optional[str] = Query(None),
    page: int = Query(1, ge=1),
    limit: int = Query(50, ge=1, le=100),
    current_user: User = Depends(authorize_roles('admin', 'accountant')),
    db: Session = Depends(get_db)
):
    """List approval requests still awaiting a decision (admin/accountant)."""
    try:
        # Optional type filter: validate the string against the enum first.
        type_filter = None
        if approval_type:
            try:
                type_filter = ApprovalType(approval_type)
            except ValueError:
                raise HTTPException(status_code=400, detail=f'Invalid approval_type: {approval_type}')
        pending = approval_service.get_pending_approvals(
            db=db,
            approval_type=type_filter,
            limit=limit,
            offset=(page - 1) * limit
        )
        serialized = [{
            'id': item.id,
            'approval_type': item.approval_type.value,
            'status': item.status.value,
            'resource_type': item.resource_type,
            'resource_id': item.resource_id,
            'requested_by': item.requested_by,
            'requested_at': item.requested_at.isoformat() if item.requested_at else None,
            'priority': item.priority,
            'notes': item.notes,
            'request_data': item.request_data,
            'current_data': item.current_data
        } for item in pending]
        return success_response(data={'approvals': serialized})
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Error getting pending approvals: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.post('/{request_id}/approve')
async def approve_request(
    request_id: int,
    notes: Optional[str] = None,
    current_user: User = Depends(authorize_roles('admin', 'accountant')),
    db: Session = Depends(get_db)
):
    """Approve a pending approval request on behalf of the current user."""
    try:
        decided = await approval_service.approve_request(
            db=db,
            request_id=request_id,
            approved_by=current_user.id,
            notes=notes
        )
        approval_body = {
            'id': decided.id,
            'status': decided.status.value,
            'approved_by': decided.approved_by,
            'approved_at': decided.approved_at.isoformat() if decided.approved_at else None
        }
        return success_response(
            data={'approval': approval_body},
            message='Approval request approved successfully'
        )
    except ValueError as e:
        # Service signals invalid state (e.g. not pending / not found) via ValueError.
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f'Error approving request: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.post('/{request_id}/reject')
async def reject_request(
    request_id: int,
    rejection_reason: str,
    current_user: User = Depends(authorize_roles('admin', 'accountant')),
    db: Session = Depends(get_db)
):
    """Reject a pending approval request, recording the mandatory reason."""
    try:
        decided = await approval_service.reject_request(
            db=db,
            request_id=request_id,
            rejected_by=current_user.id,
            rejection_reason=rejection_reason
        )
        approval_body = {
            'id': decided.id,
            'status': decided.status.value,
            'rejection_reason': decided.rejection_reason
        }
        return success_response(
            data={'approval': approval_body},
            message='Approval request rejected'
        )
    except ValueError as e:
        # Service signals invalid state (e.g. not pending / not found) via ValueError.
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f'Error rejecting request: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.get('/my-requests')
async def get_my_approval_requests(
    status: Optional[str] = Query(None),
    page: int = Query(1, ge=1),
    limit: int = Query(50, ge=1, le=100),
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """List the calling user's own approval requests, optionally filtered by status."""
    try:
        # Optional status filter: validate the string against the enum first.
        status_filter = None
        if status:
            try:
                status_filter = ApprovalStatus(status)
            except ValueError:
                raise HTTPException(status_code=400, detail=f'Invalid status: {status}')
        records = approval_service.get_user_approvals(
            db=db,
            user_id=current_user.id,
            status=status_filter,
            limit=limit,
            offset=(page - 1) * limit
        )
        serialized = [{
            'id': rec.id,
            'approval_type': rec.approval_type.value,
            'status': rec.status.value,
            'resource_type': rec.resource_type,
            'resource_id': rec.resource_id,
            'requested_at': rec.requested_at.isoformat() if rec.requested_at else None,
            'approved_at': rec.approved_at.isoformat() if rec.approved_at else None,
            'rejection_reason': rec.rejection_reason
        } for rec in records]
        return success_response(data={'approvals': serialized})
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Error getting user approvals: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
@router.delete('/{request_id}')
async def cancel_approval_request(
    request_id: int,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Cancel an approval request (only by requester or admin).

    Only the original requester or an admin may cancel, and only while the
    request is still pending. Returns 404 if the request does not exist.
    """
    try:
        approval = db.query(ApprovalRequest).filter(ApprovalRequest.id == request_id).first()
        if not approval:
            raise HTTPException(status_code=404, detail='Approval request not found')
        # Only requester or admin can cancel
        if approval.requested_by != current_user.id and current_user.role.name != 'admin':
            raise HTTPException(status_code=403, detail='Not authorized to cancel this request')
        # Only pending requests can be cancelled
        if approval.status != ApprovalStatus.pending:
            raise HTTPException(status_code=400, detail='Only pending requests can be cancelled')
        approval.status = ApprovalStatus.cancelled
        db.commit()
        logger.info(f'Approval request {request_id} cancelled by user {current_user.id}')
        return success_response(message='Approval request cancelled successfully')
    except HTTPException:
        raise
    except Exception as e:
        # FIX: roll back the in-progress transaction so a failed commit does not
        # leave the session in a broken state for subsequent requests.
        db.rollback()
        logger.error(f'Error cancelling approval request: {str(e)}', exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))

View File

@@ -0,0 +1,231 @@
"""
Approval workflow service for enterprise financial operations.
"""
from sqlalchemy.orm import Session
from typing import Optional, Dict, Any, List
from datetime import datetime
from ..models.approval_workflow import ApprovalRequest, ApprovalStatus, ApprovalType
from ...auth.models.user import User
from ...shared.config.logging_config import get_logger
from ...analytics.services.audit_service import audit_service
logger = get_logger(__name__)
class ApprovalService:
    """Service for managing approval workflows.

    Combines a pure policy check (:meth:`requires_approval`) with
    persistence helpers for ``ApprovalRequest`` rows. All mutating
    helpers commit immediately and then write an audit-log entry.
    Callers should use the module-level ``approval_service`` singleton.
    """
    # Thresholds for automatic approval requirements. Amounts are
    # compared with ``>``, so a value exactly at a threshold does NOT
    # require approval.
    REFUND_THRESHOLD = 1000.0  # Require approval for refunds > $1000
    INVOICE_UPDATE_THRESHOLD = 500.0  # Require approval for invoice updates > $500
    LARGE_TRANSACTION_THRESHOLD = 5000.0  # Require approval for transactions > $5000
    @staticmethod
    def requires_approval(
        approval_type: ApprovalType,
        amount: Optional[float] = None,
        user_role: Optional[str] = None
    ) -> bool:
        """Check if an action requires approval.

        Args:
            approval_type: Kind of operation being attempted.
            amount: Monetary amount involved, if applicable. A falsy
                amount (``None`` or ``0``) skips all threshold checks.
            user_role: Role name of the acting user.

        Returns:
            True if the operation must go through the approval workflow.
        """
        # Admin and accountant can auto-approve their own actions
        if user_role in ['admin', 'accountant']:
            return False
        # Staff always requires approval for financial operations
        if user_role == 'staff':
            if approval_type in [
                ApprovalType.invoice_update,
                ApprovalType.payment_refund,
                ApprovalType.invoice_mark_paid,
                ApprovalType.financial_adjustment
            ]:
                return True
        # Check amount thresholds. Reached by every role other than
        # admin/accountant, including None/unknown roles.
        if amount:
            if approval_type == ApprovalType.payment_refund and amount > ApprovalService.REFUND_THRESHOLD:
                return True
            if approval_type == ApprovalType.invoice_update and amount > ApprovalService.INVOICE_UPDATE_THRESHOLD:
                return True
            if approval_type == ApprovalType.large_transaction and amount > ApprovalService.LARGE_TRANSACTION_THRESHOLD:
                return True
        return False
    @staticmethod
    async def create_approval_request(
        db: Session,
        approval_type: ApprovalType,
        resource_type: str,
        resource_id: int,
        requested_by: int,
        request_data: Dict[str, Any],
        current_data: Optional[Dict[str, Any]] = None,
        priority: str = 'normal',
        notes: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None
    ) -> ApprovalRequest:
        """Create a new approval request.

        Persists a pending ``ApprovalRequest`` (committing immediately)
        and records an audit-log entry for the creation.

        Args:
            db: Active SQLAlchemy session.
            approval_type: Kind of operation awaiting approval.
            resource_type: Name of the affected resource type.
            resource_id: Primary key of the affected resource.
            requested_by: ID of the user requesting the change.
            request_data: The proposed new data.
            current_data: Snapshot of the data before the change, if any.
            priority: Free-form priority label (defaults to 'normal').
            notes: Optional requester notes.
            metadata: Extra context, stored in ``extra_metadata``.

        Returns:
            The freshly persisted ApprovalRequest (refreshed, with id).
        """
        approval_request = ApprovalRequest(
            approval_type=approval_type,
            status=ApprovalStatus.pending,
            requested_by=requested_by,
            resource_type=resource_type,
            resource_id=resource_id,
            request_data=request_data,
            current_data=current_data,
            priority=priority,
            notes=notes,
            extra_metadata=metadata
        )
        db.add(approval_request)
        db.commit()
        db.refresh(approval_request)
        # Log approval request creation (after commit, so the audit row
        # can reference the generated request id).
        await audit_service.log_action(
            db=db,
            action='approval_request_created',
            resource_type='approval_request',
            user_id=requested_by,
            resource_id=approval_request.id,
            details={
                'approval_type': approval_type.value,
                'resource_type': resource_type,
                'resource_id': resource_id,
                'priority': priority
            },
            status='success'
        )
        logger.info(f'Approval request created: {approval_request.id} for {approval_type.value}')
        return approval_request
    @staticmethod
    async def approve_request(
        db: Session,
        request_id: int,
        approved_by: int,
        notes: Optional[str] = None
    ) -> ApprovalRequest:
        """Approve an approval request.

        Args:
            db: Active SQLAlchemy session.
            request_id: ID of the request to approve.
            approved_by: ID of the approving user.
            notes: Optional reviewer notes; if given, this REPLACES any
                notes the requester stored on the request.

        Returns:
            The updated ApprovalRequest.

        Raises:
            ValueError: If the request does not exist or is not pending.
        """
        approval_request = db.query(ApprovalRequest).filter(
            ApprovalRequest.id == request_id
        ).first()
        if not approval_request:
            raise ValueError('Approval request not found')
        if approval_request.status != ApprovalStatus.pending:
            raise ValueError(f'Approval request is already {approval_request.status.value}')
        approval_request.status = ApprovalStatus.approved
        approval_request.approved_by = approved_by
        # NOTE(review): naive UTC timestamp (datetime.utcnow) — matches
        # the rest of this module, but confirm the DB expects naive UTC.
        approval_request.approved_at = datetime.utcnow()
        if notes:
            approval_request.notes = notes
        db.commit()
        db.refresh(approval_request)
        # Log approval
        await audit_service.log_action(
            db=db,
            action='approval_request_approved',
            resource_type='approval_request',
            user_id=approved_by,
            resource_id=request_id,
            details={
                'approval_type': approval_request.approval_type.value,
                'resource_type': approval_request.resource_type,
                'resource_id': approval_request.resource_id
            },
            status='success'
        )
        logger.info(f'Approval request {request_id} approved by user {approved_by}')
        return approval_request
    @staticmethod
    async def reject_request(
        db: Session,
        request_id: int,
        rejected_by: int,
        rejection_reason: str
    ) -> ApprovalRequest:
        """Reject an approval request.

        The rejecting reviewer is recorded in the same ``approved_by`` /
        ``approved_at`` columns used for approvals; the ``status`` and
        ``rejection_reason`` fields distinguish the outcome.

        Args:
            db: Active SQLAlchemy session.
            request_id: ID of the request to reject.
            rejected_by: ID of the rejecting user.
            rejection_reason: Mandatory explanation, stored on the row.

        Returns:
            The updated ApprovalRequest.

        Raises:
            ValueError: If the request does not exist or is not pending.
        """
        approval_request = db.query(ApprovalRequest).filter(
            ApprovalRequest.id == request_id
        ).first()
        if not approval_request:
            raise ValueError('Approval request not found')
        if approval_request.status != ApprovalStatus.pending:
            raise ValueError(f'Approval request is already {approval_request.status.value}')
        approval_request.status = ApprovalStatus.rejected
        approval_request.approved_by = rejected_by
        approval_request.approved_at = datetime.utcnow()
        approval_request.rejection_reason = rejection_reason
        db.commit()
        db.refresh(approval_request)
        # Log rejection
        await audit_service.log_action(
            db=db,
            action='approval_request_rejected',
            resource_type='approval_request',
            user_id=rejected_by,
            resource_id=request_id,
            details={
                'approval_type': approval_request.approval_type.value,
                'rejection_reason': rejection_reason
            },
            status='success'
        )
        logger.info(f'Approval request {request_id} rejected by user {rejected_by}')
        return approval_request
    @staticmethod
    def get_pending_approvals(
        db: Session,
        approval_type: Optional[ApprovalType] = None,
        limit: int = 50,
        offset: int = 0
    ) -> List[ApprovalRequest]:
        """Get pending approval requests, optionally filtered by type.

        Results are ordered by priority (descending) then request age
        (oldest first), paginated via limit/offset.
        """
        query = db.query(ApprovalRequest).filter(
            ApprovalRequest.status == ApprovalStatus.pending
        )
        if approval_type:
            query = query.filter(ApprovalRequest.approval_type == approval_type)
        # NOTE(review): priority is a free-form string, so ``.desc()``
        # sorts alphabetically ('urgent' > 'normal' > 'low' > 'high') —
        # 'high' sorts LAST. Confirm this ordering is intended.
        return query.order_by(
            ApprovalRequest.priority.desc(),
            ApprovalRequest.requested_at.asc()
        ).offset(offset).limit(limit).all()
    @staticmethod
    def get_user_approvals(
        db: Session,
        user_id: int,
        status: Optional[ApprovalStatus] = None,
        limit: int = 50,
        offset: int = 0
    ) -> List[ApprovalRequest]:
        """Get approval requests created by a user, newest first.

        Args:
            db: Active SQLAlchemy session.
            user_id: Requester whose requests are listed.
            status: Optional status filter; None returns all statuses.
            limit: Page size.
            offset: Pagination offset.
        """
        query = db.query(ApprovalRequest).filter(
            ApprovalRequest.requested_by == user_id
        )
        if status:
            query = query.filter(ApprovalRequest.status == status)
        return query.order_by(ApprovalRequest.created_at.desc()).offset(offset).limit(limit).all()
# Shared singleton used by the API routers.
approval_service = ApprovalService()

View File

@@ -0,0 +1,240 @@
"""
Data backup service for enterprise data protection.
"""
from sqlalchemy.orm import Session
from typing import Optional, Dict, Any
from datetime import datetime, timedelta
from pathlib import Path
import json
import subprocess
import os
import shutil
from ...shared.config.settings import settings
from ...shared.config.logging_config import get_logger
logger = get_logger(__name__)
def is_running_in_docker() -> bool:
    """Report whether this process appears to run inside a Docker container.

    Three heuristics are tried in order: the ``/.dockerenv`` sentinel
    file, a 'docker' entry in this process's cgroup listing, and an
    explicit ``DOCKER_CONTAINER=true`` environment variable.
    """
    # Sentinel file created by the Docker runtime (most reliable signal).
    if os.path.exists('/.dockerenv'):
        return True
    # Fall back to inspecting this process's cgroup membership.
    try:
        with open('/proc/self/cgroup', 'r') as cgroup_file:
            cgroup_contents = cgroup_file.read()
    except (FileNotFoundError, IOError):
        cgroup_contents = ''
    if 'docker' in cgroup_contents:
        return True
    # Last resort: explicit opt-in flag from the deployment environment.
    return os.environ.get('DOCKER_CONTAINER') == 'true'
class BackupService:
    """Service for managing database backups.

    Dumps are produced with ``mysqldump`` into :attr:`BACKUP_DIR`; each
    successful dump gets a sidecar ``.json`` metadata file that the
    listing and cleanup helpers rely on. Callers should use the
    module-level ``backup_service`` singleton.
    """
    BACKUP_DIR = Path('backups')  # resolved relative to the working directory
    RETENTION_DAYS = 30  # Keep backups for 30 days

    @staticmethod
    def ensure_backup_dir() -> None:
        """Create the backup directory (and parents) if it does not exist."""
        BackupService.BACKUP_DIR.mkdir(parents=True, exist_ok=True)

    @staticmethod
    def check_mysqldump_available() -> bool:
        """Return True if ``mysqldump`` can be found on the PATH."""
        return shutil.which('mysqldump') is not None

    @staticmethod
    def create_backup(db_name: str, db_user: str, db_password: str, db_host: str, db_port: str) -> Dict[str, Any]:
        """
        Create a database backup.

        Runs ``mysqldump`` over TCP (resolving the host name when inside
        Docker), writes the dump into ``BACKUP_DIR`` and stores a sidecar
        JSON metadata file next to it.

        Args:
            db_name: Database/schema to dump.
            db_user: MySQL user name.
            db_password: Password; passed via the MYSQL_PWD environment
                variable so it never appears on the command line.
            db_host: Configured database host.
            db_port: Database port (string, passed straight to mysqldump).

        Returns:
            Backup metadata (filename, path, sizes, timestamp, status).

        Raises:
            FileNotFoundError: If mysqldump is not installed.
            Exception: If the dump exits non-zero or times out. Any
                partially written dump file is removed before re-raising.
        """
        BackupService.ensure_backup_dir()
        # Check if mysqldump is available
        if not BackupService.check_mysqldump_available():
            error_msg = (
                "mysqldump is not installed or not in PATH. "
                "Please install MySQL client tools: "
                "Ubuntu/Debian: sudo apt-get install mysql-client "
                "or CentOS/RHEL: sudo yum install mysql"
            )
            logger.error(error_msg)
            raise FileNotFoundError(error_msg)
        timestamp = datetime.utcnow().strftime('%Y%m%d_%H%M%S')
        backup_filename = f'backup_{db_name}_{timestamp}.sql'
        backup_path = BackupService.BACKUP_DIR / backup_filename
        try:
            # Detect Docker environment
            in_docker = is_running_in_docker()
            # Determine the correct host to use.
            # In Docker, "localhost" points at the container itself, so fall
            # back to the compose service name (or MYSQL_SERVICE_NAME).
            actual_host = db_host
            if in_docker:
                if db_host in ('localhost', '127.0.0.1'):
                    # Try to use MySQL service name from docker-compose (common names)
                    mysql_service_name = os.environ.get('MYSQL_SERVICE_NAME', 'mysql')
                    actual_host = mysql_service_name
                    logger.info(f"Running in Docker, using MySQL service name: {actual_host}")
                else:
                    logger.info(f"Running in Docker, using configured host: {actual_host}")
            else:
                logger.info(f"Running on host, using configured host: {actual_host}")
            # Pass the password via the environment rather than argv so it
            # is not visible in the process list.
            env = os.environ.copy()
            if db_password:
                env['MYSQL_PWD'] = db_password
            # Get full path to mysqldump
            mysqldump_path = shutil.which('mysqldump')
            if not mysqldump_path:
                raise FileNotFoundError("mysqldump not found in PATH")
            # Build mysqldump command.
            # Always use TCP to avoid unix-socket issues, especially in Docker.
            cmd = [
                mysqldump_path,
                '--protocol=TCP',  # Force TCP/IP connection
                f'--user={db_user}',
                f'--host={actual_host}',
                f'--port={db_port}',
                '--single-transaction',
                '--routines',
                '--triggers',
                '--skip-lock-tables',  # Useful in Docker environments
                db_name
            ]
            logger.info(f"Executing mysqldump: {' '.join(cmd[:3])}... [password hidden]")
            with open(backup_path, 'w') as f:
                result = subprocess.run(
                    cmd,
                    stdout=f,
                    stderr=subprocess.PIPE,
                    env=env,
                    text=True,
                    timeout=300  # 5 minute timeout
                )
            if result.returncode != 0:
                error_msg = result.stderr or 'Unknown error'
                # Provide more helpful error messages for connection
                # failures (MySQL client errors 2002/2003).
                if 'Can\'t connect' in error_msg or '2002' in error_msg or '2003' in error_msg:
                    if in_docker:
                        enhanced_error = (
                            f"Connection failed. Running in Docker environment.\n"
                            f"Attempted to connect to: {actual_host}:{db_port}\n"
                            f"Original error: {error_msg}\n"
                            f"Tip: Ensure MySQL service is accessible from this container. "
                            f"If using docker-compose, the service name should be 'mysql' or set MYSQL_SERVICE_NAME env var."
                        )
                    else:
                        enhanced_error = (
                            f"Connection failed to MySQL server.\n"
                            f"Attempted to connect to: {actual_host}:{db_port}\n"
                            f"Original error: {error_msg}\n"
                            f"Tip: Ensure MySQL server is running and accessible at {actual_host}:{db_port}"
                        )
                    raise Exception(f'Backup failed: {enhanced_error}')
                raise Exception(f'Backup failed: {error_msg}')
            # Get backup size
            backup_size = backup_path.stat().st_size
            # Create metadata
            metadata = {
                'filename': backup_filename,
                'path': str(backup_path),
                'size_bytes': backup_size,
                'size_mb': round(backup_size / (1024 * 1024), 2),
                'created_at': datetime.utcnow().isoformat(),
                'database': db_name,
                'status': 'success'
            }
            # Save metadata
            metadata_path = backup_path.with_suffix('.json')
            with open(metadata_path, 'w') as f:
                json.dump(metadata, f, indent=2)
            logger.info(f'Backup created: {backup_filename} ({metadata["size_mb"]} MB)')
            return metadata
        except Exception as e:
            # A failed or timed-out dump can leave a truncated .sql file
            # behind; remove it so it is never mistaken for a valid backup.
            try:
                backup_path.unlink(missing_ok=True)
            except OSError:
                pass  # never mask the original failure with cleanup errors
            logger.error(f'Backup failed: {str(e)}', exc_info=True)
            raise

    @staticmethod
    def list_backups() -> list[Dict[str, Any]]:
        """List all available backups (metadata dicts), newest first.

        Only dumps that have a readable sidecar ``.json`` metadata file
        are reported; unreadable metadata is logged and skipped.
        """
        BackupService.ensure_backup_dir()
        backups = []
        for backup_file in BackupService.BACKUP_DIR.glob('backup_*.sql'):
            metadata_file = backup_file.with_suffix('.json')
            if metadata_file.exists():
                try:
                    with open(metadata_file, 'r') as f:
                        metadata = json.load(f)
                    backups.append(metadata)
                except Exception as e:
                    logger.warning(f'Error reading backup metadata {metadata_file}: {str(e)}')
        # Sort by creation date (newest first)
        backups.sort(key=lambda x: x.get('created_at', ''), reverse=True)
        return backups

    @staticmethod
    def cleanup_old_backups() -> int:
        """Remove backups older than RETENTION_DAYS.

        Returns:
            Number of backups removed. Backups with missing or malformed
            metadata are logged and left in place.
        """
        BackupService.ensure_backup_dir()
        cutoff_date = datetime.utcnow() - timedelta(days=BackupService.RETENTION_DAYS)
        removed_count = 0
        for backup_file in BackupService.BACKUP_DIR.glob('backup_*.sql'):
            metadata_file = backup_file.with_suffix('.json')
            if metadata_file.exists():
                try:
                    with open(metadata_file, 'r') as f:
                        metadata = json.load(f)
                    # Missing/invalid 'created_at' raises ValueError here
                    # and the backup is skipped (logged below).
                    created_at = datetime.fromisoformat(metadata.get('created_at', ''))
                    if created_at < cutoff_date:
                        backup_file.unlink()
                        metadata_file.unlink()
                        removed_count += 1
                        logger.info(f'Removed old backup: {backup_file.name}')
                except Exception as e:
                    logger.warning(f'Error processing backup {backup_file}: {str(e)}')
        return removed_count

    @staticmethod
    def get_backup_info(backup_filename: str) -> Optional[Dict[str, Any]]:
        """Return the metadata dict for one backup, or None if absent/unreadable."""
        backup_path = BackupService.BACKUP_DIR / backup_filename
        metadata_path = backup_path.with_suffix('.json')
        if not metadata_path.exists():
            return None
        try:
            with open(metadata_path, 'r') as f:
                return json.load(f)
        except Exception as e:
            logger.error(f'Error reading backup info: {str(e)}')
            return None
# Shared singleton used by the API routers.
backup_service = BackupService()

Some files were not shown because too many files have changed in this diff Show More