Iliyan Angelov
2025-12-01 06:50:10 +02:00
parent 91f51bc6fe
commit 62c1fe5951
4682 changed files with 544807 additions and 31208 deletions

View File

@@ -7,6 +7,7 @@ import uuid
import os
from ...shared.config.database import get_db
from ..services.auth_service import auth_service
from ..services.session_service import session_service
from ..schemas.auth import RegisterRequest, LoginRequest, RefreshTokenRequest, ForgotPasswordRequest, ResetPasswordRequest, AuthResponse, TokenResponse, MessageResponse, MFAInitResponse, EnableMFARequest, VerifyMFARequest, MFAStatusResponse, UpdateProfileRequest
from ...security.middleware.auth import get_current_user
from ..models.user import User
@@ -85,6 +86,26 @@ async def register(
path='/'
)
# Create user session for new registration
try:
# Extract device info from user agent
device_info = None
if user_agent:
device_info = {'user_agent': user_agent}
session_service.create_session(
db=db,
user_id=result['user']['id'],
ip_address=client_ip,
user_agent=user_agent,
device_info=str(device_info) if device_info else None
)
except Exception as e:
# Log error but don't fail registration if session creation fails
from ...shared.config.logging_config import get_logger
logger = get_logger(__name__)
logger.warning(f'Failed to create session during registration: {str(e)}')
# Log successful registration
await audit_service.log_action(
db=db,
@@ -171,6 +192,26 @@ async def login(
path='/'
)
# Create user session
try:
# Extract device info from user agent
device_info = None
if user_agent:
device_info = {'user_agent': user_agent}
session_service.create_session(
db=db,
user_id=result['user']['id'],
ip_address=client_ip,
user_agent=user_agent,
device_info=str(device_info) if device_info else None
)
except Exception as e:
# Log error but don't fail login if session creation fails
from ...shared.config.logging_config import get_logger
logger = get_logger(__name__)
logger.warning(f'Failed to create session during login: {str(e)}')
# Log successful login
await audit_service.log_action(
db=db,
@@ -394,16 +435,23 @@ async def upload_avatar(request: Request, image: UploadFile=File(...), current_u
# Validate file completely (MIME type, size, magic bytes, integrity)
content = await validate_uploaded_image(image, max_avatar_size)
upload_dir = Path(__file__).parent.parent.parent / 'uploads' / 'avatars'
# Use same path calculation as main.py: go from Backend/src/auth/routes/auth_routes.py
# to Backend/uploads/avatars
upload_dir = Path(__file__).parent.parent.parent.parent / 'uploads' / 'avatars'
upload_dir.mkdir(parents=True, exist_ok=True)
if current_user.avatar:
old_avatar_path = Path(__file__).parent.parent.parent / current_user.avatar.lstrip('/')
old_avatar_path = Path(__file__).parent.parent.parent.parent / current_user.avatar.lstrip('/')
if old_avatar_path.exists() and old_avatar_path.is_file():
try:
old_avatar_path.unlink()
except Exception:
pass
ext = Path(image.filename).suffix or '.png'
# Sanitize filename to prevent path traversal attacks
from ...shared.utils.sanitization import sanitize_filename
original_filename = image.filename or 'avatar.png'
sanitized_filename = sanitize_filename(original_filename)
ext = Path(sanitized_filename).suffix or '.png'
# Generate secure filename with user ID and UUID to prevent collisions
filename = f'avatar-{current_user.id}-{uuid.uuid4()}{ext}'
file_path = upload_dir / filename
async with aiofiles.open(file_path, 'wb') as f:
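The register and login hunks above both wrap session bookkeeping in a try/except so that a session-service failure never blocks authentication. A minimal sketch of that pattern, with create_session as a hypothetical stand-in for the project's session_service.create_session:

import logging
from typing import Callable, Optional

logger = logging.getLogger(__name__)

def record_session_best_effort(
    create_session: Callable[..., None],   # hypothetical stand-in for session_service.create_session
    user_id: int,
    client_ip: Optional[str],
    user_agent: Optional[str],
) -> None:
    """Record a session on a best-effort basis; auth succeeds even if this fails."""
    device_info = {'user_agent': user_agent} if user_agent else None
    try:
        create_session(
            user_id=user_id,
            ip_address=client_ip,
            user_agent=user_agent,
            device_info=str(device_info) if device_info else None,
        )
    except Exception as exc:  # deliberately broad, mirroring the route's behaviour
        logger.warning('Failed to create session for user %s: %s', user_id, exc)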

View File

@@ -1,14 +1,17 @@
"""
User session management routes.
"""
from fastapi import APIRouter, Depends, HTTPException
from fastapi import APIRouter, Depends, HTTPException, Request, Response, Cookie
from sqlalchemy.orm import Session
from ...shared.config.database import get_db
from ...shared.config.logging_config import get_logger
from ...shared.config.settings import settings
from ...security.middleware.auth import get_current_user
from ...auth.models.user import User
from ...auth.models.user_session import UserSession
from ...auth.services.session_service import session_service
from ...shared.utils.response_helpers import success_response
from jose import jwt
logger = get_logger(__name__)
router = APIRouter(prefix='/sessions', tags=['sessions'])
@@ -44,13 +47,15 @@ async def get_my_sessions(
@router.delete('/{session_id}')
async def revoke_session(
session_id: int,
request: Request,
response: Response,
current_user: User = Depends(get_current_user),
access_token: str = Cookie(None, alias='accessToken'),
db: Session = Depends(get_db)
):
"""Revoke a specific session."""
try:
# Verify session belongs to user
from ...auth.models.user_session import UserSession
session = db.query(UserSession).filter(
UserSession.id == session_id,
UserSession.user_id == current_user.id
@@ -59,10 +64,62 @@ async def revoke_session(
if not session:
raise HTTPException(status_code=404, detail='Session not found')
# Check if this is the current session being revoked
# We detect this by checking if:
# 1. The session IP matches the request IP (if available)
# 2. The session is the most recent active session
is_current_session = False
try:
client_ip = request.client.host if request.client else None
user_agent = request.headers.get('User-Agent', '')
# Check if session matches current request characteristics
if client_ip and session.ip_address == client_ip:
# Also check if it's the most recent session
recent_session = db.query(UserSession).filter(
UserSession.user_id == current_user.id,
UserSession.is_active == True
).order_by(UserSession.last_activity.desc()).first()
if recent_session and recent_session.id == session_id:
is_current_session = True
except Exception as e:
logger.warning(f'Could not determine if session is current: {str(e)}')
# If we can't determine, check if it's the only active session
active_sessions_count = db.query(UserSession).filter(
UserSession.user_id == current_user.id,
UserSession.is_active == True
).count()
if active_sessions_count <= 1:
is_current_session = True
success = session_service.revoke_session(db=db, session_token=session.session_token)
if not success:
raise HTTPException(status_code=404, detail='Session not found')
# If this was the current session, clear cookies and indicate logout needed
if is_current_session:
from ...shared.config.settings import settings
samesite_value = 'strict' if settings.is_production else 'lax'
# Clear access token cookie
response.delete_cookie(
key='accessToken',
path='/',
samesite=samesite_value,
secure=settings.is_production
)
# Clear refresh token cookie
response.delete_cookie(
key='refreshToken',
path='/',
samesite=samesite_value,
secure=settings.is_production
)
return success_response(
message='Session revoked successfully. You have been logged out.',
data={'logout_required': True}
)
return success_response(message='Session revoked successfully')
except HTTPException:
raise
@@ -72,19 +129,41 @@ async def revoke_session(
@router.post('/revoke-all')
async def revoke_all_sessions(
request: Request,
response: Response,
current_user: User = Depends(get_current_user),
access_token: str = Cookie(None, alias='accessToken'),
db: Session = Depends(get_db)
):
"""Revoke all sessions for current user."""
try:
count = session_service.revoke_all_user_sessions(
db=db,
user_id=current_user.id
user_id=current_user.id,
exclude_token=None # Don't exclude current session, revoke all
)
# Clear cookies since all sessions (including current) are revoked
from ...shared.config.settings import settings
samesite_value = 'strict' if settings.is_production else 'lax'
# Clear access token cookie
response.delete_cookie(
key='accessToken',
path='/',
samesite=samesite_value,
secure=settings.is_production
)
# Clear refresh token cookie
response.delete_cookie(
key='refreshToken',
path='/',
samesite=samesite_value,
secure=settings.is_production
)
return success_response(
data={'revoked_count': count},
message=f'Revoked {count} session(s)'
data={'revoked_count': count, 'logout_required': True},
message=f'Revoked {count} session(s). You have been logged out.'
)
except Exception as e:
logger.error(f'Error revoking all sessions: {str(e)}', exc_info=True)
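Both revocation endpoints above clear the auth cookies with the same key and path they were set with, which is what a browser needs to actually discard them. A small sketch of that step, assuming FastAPI/Starlette's Response.delete_cookie and a boolean production flag like settings.is_production:

from fastapi import Response

def clear_auth_cookies(response: Response, is_production: bool) -> None:
    """Delete both auth cookies, mirroring the attributes they were originally set with."""
    samesite_value = 'strict' if is_production else 'lax'
    for cookie_name in ('accessToken', 'refreshToken'):
        response.delete_cookie(
            key=cookie_name,
            path='/',
            samesite=samesite_value,
            secure=is_production,
        )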

View File

@@ -29,19 +29,13 @@ class AuthService:
if not self.jwt_secret:
error_msg = (
'CRITICAL: JWT_SECRET is not configured. '
'Please set JWT_SECRET environment variable to a secure random string (minimum 32 characters).'
'Please set JWT_SECRET environment variable to a secure random string (minimum 64 characters). '
'Generate one using: python -c "import secrets; print(secrets.token_urlsafe(64))"'
)
logger.error(error_msg)
if settings.is_production:
raise ValueError(error_msg)
else:
# In development, generate a secure secret but warn
import secrets
self.jwt_secret = secrets.token_urlsafe(64)
logger.warning(
f'JWT_SECRET not configured. Auto-generated secret for development. '
f'Set JWT_SECRET environment variable for production: {self.jwt_secret}'
)
# SECURITY: Always fail if JWT_SECRET is not configured, even in development
# This prevents accidental deployment without proper secrets
raise ValueError(error_msg)
# Validate JWT secret strength
if len(self.jwt_secret) < 32:
@@ -65,14 +59,37 @@ class AuthService:
self.jwt_refresh_expires_in = os.getenv("JWT_REFRESH_EXPIRES_IN", "7d")
def generate_tokens(self, user_id: int) -> dict:
from datetime import datetime, timedelta
# SECURITY: Add standard JWT claims for better security
now = datetime.utcnow()
access_expires = now + timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES)
refresh_expires = now + timedelta(days=settings.JWT_REFRESH_TOKEN_EXPIRE_DAYS)
access_payload = {
"userId": user_id,
"exp": access_expires, # Expiration time
"iat": now, # Issued at
"iss": settings.APP_NAME, # Issuer
"type": "access" # Token type
}
refresh_payload = {
"userId": user_id,
"exp": refresh_expires, # Expiration time
"iat": now, # Issued at
"iss": settings.APP_NAME, # Issuer
"type": "refresh" # Token type
}
access_token = jwt.encode(
{"userId": user_id},
access_payload,
self.jwt_secret,
algorithm="HS256"
)
refresh_token = jwt.encode(
{"userId": user_id},
refresh_payload,
self.jwt_refresh_secret,
algorithm="HS256"
)
@@ -316,8 +333,22 @@ class AuthService:
db.commit()
raise ValueError("Refresh token expired")
from datetime import datetime, timedelta
# SECURITY: Add standard JWT claims when refreshing token
now = datetime.utcnow()
access_expires = now + timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES)
access_payload = {
"userId": decoded["userId"],
"exp": access_expires, # Expiration time
"iat": now, # Issued at
"iss": settings.APP_NAME, # Issuer
"type": "access" # Token type
}
access_token = jwt.encode(
{"userId": decoded["userId"]},
access_payload,
self.jwt_secret,
algorithm="HS256"
)
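On the consuming side, the new exp/iat/iss/type claims only add security if they are checked when tokens are decoded. A hedged sketch of such a check, assuming python-jose (the same jose package imported elsewhere in this commit); jwt.decode verifies exp automatically:

from jose import JWTError, jwt

def decode_access_token(token: str, secret: str, issuer: str) -> int:
    """Decode an access token and enforce the issuer and token-type claims."""
    try:
        payload = jwt.decode(
            token,
            secret,
            algorithms=['HS256'],
            issuer=issuer,  # rejects tokens whose iss claim does not match
        )
    except JWTError as exc:
        raise ValueError(f'Invalid or expired token: {exc}') from exc
    if payload.get('type') != 'access':
        raise ValueError('Refresh tokens cannot be used as access tokens')
    return payload['userId']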

View File

@@ -4,7 +4,7 @@ from sqlalchemy import and_, or_, func
from sqlalchemy.exc import IntegrityError
from typing import Optional
from datetime import datetime
import random
import secrets
import os
from ...shared.config.database import get_db
from ...shared.config.settings import settings
@@ -37,7 +37,8 @@ def _generate_invoice_email_html(invoice: dict, is_proforma: bool=False) -> str:
def generate_booking_number() -> str:
prefix = 'BK'
ts = int(datetime.utcnow().timestamp() * 1000)
rand = random.randint(1000, 9999)
# Use cryptographically secure random number to prevent enumeration attacks
rand = secrets.randbelow(9000) + 1000 # Random number between 1000-9999
return f'{prefix}-{ts}-{rand}'
def calculate_booking_payment_balance(booking: Booking) -> dict:
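The switch from random.randint to secrets.randbelow matters because the random module's Mersenne Twister output becomes predictable once enough values are observed, while secrets draws from the operating system's CSPRNG. The replacement expression covers the same range:

import secrets

suffix = secrets.randbelow(9000) + 1000   # uniform over 1000..9999, like randint(1000, 9999)
assert 1000 <= suffix <= 9999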

View File

@@ -1,7 +1,7 @@
from sqlalchemy.orm import Session
from datetime import datetime, timedelta
from typing import Optional, List, Dict, Any
import random
import secrets
import string
from decimal import Decimal
from ..models.group_booking import (
@@ -21,11 +21,13 @@ class GroupBookingService:
@staticmethod
def generate_group_booking_number(db: Session) -> str:
"""Generate unique group booking number"""
"""Generate unique group booking number using cryptographically secure random"""
max_attempts = 10
alphabet = string.ascii_uppercase + string.digits
for _ in range(max_attempts):
timestamp = datetime.utcnow().strftime('%Y%m%d')
random_suffix = ''.join(random.choices(string.ascii_uppercase + string.digits, k=6))
# Use secrets.choice() instead of random.choices() for security
random_suffix = ''.join(secrets.choice(alphabet) for _ in range(6))
booking_number = f"GRP-{timestamp}-{random_suffix}"
existing = db.query(GroupBooking).filter(
@@ -35,8 +37,9 @@ class GroupBookingService:
if not existing:
return booking_number
# Fallback
return f"GRP-{int(datetime.utcnow().timestamp())}"
# Fallback with secure random suffix
random_suffix = ''.join(secrets.choice(alphabet) for _ in range(4))
return f"GRP-{int(datetime.utcnow().timestamp())}{random_suffix}"
@staticmethod
def calculate_group_discount(
@@ -405,17 +408,19 @@ class GroupBookingService:
# Use proportional share
booking_price = group_booking.total_price / group_booking.total_rooms
# Generate booking number
import random
# Generate booking number using cryptographically secure random
prefix = 'BK'
ts = int(datetime.utcnow().timestamp() * 1000)
rand = random.randint(1000, 9999)
# Use secrets.randbelow() instead of random.randint() for security
rand = secrets.randbelow(9000) + 1000 # Random number between 1000-9999
booking_number = f'{prefix}-{ts}-{rand}'
# Ensure uniqueness
existing = db.query(Booking).filter(Booking.booking_number == booking_number).first()
if existing:
booking_number = f'{prefix}-{ts}-{rand + 1}'
# If collision, generate new secure random number
rand = secrets.randbelow(9000) + 1000
booking_number = f'{prefix}-{ts}-{rand}'
# Create booking
booking = Booking(
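When a collision is detected, regenerating the suffix (as above) keeps the number unpredictable, unlike the old rand + 1 fallback. A hedged sketch of the full retry loop, with an exists callable standing in for the db.query(Booking) uniqueness check:

import secrets
from datetime import datetime
from typing import Callable

def generate_unique_booking_number(exists: Callable[[str], bool], max_attempts: int = 10) -> str:
    """Generate a BK-<millis>-<4 digits> number, re-rolling the suffix on collision."""
    ts = int(datetime.utcnow().timestamp() * 1000)
    for _ in range(max_attempts):
        candidate = f'BK-{ts}-{secrets.randbelow(9000) + 1000}'
        if not exists(candidate):
            return candidate
    # With a millisecond timestamp plus four random digits this is effectively unreachable
    raise RuntimeError('Could not generate a unique booking number')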

View File

@@ -0,0 +1,26 @@
"""
GDPR Compliance Models.
"""
from .gdpr_request import GDPRRequest, GDPRRequestType, GDPRRequestStatus
from .consent import Consent, ConsentType, ConsentStatus
from .data_processing_record import DataProcessingRecord, ProcessingCategory, LegalBasis
from .data_breach import DataBreach, BreachType, BreachStatus
from .data_retention import RetentionRule, DataRetentionLog
__all__ = [
'GDPRRequest',
'GDPRRequestType',
'GDPRRequestStatus',
'Consent',
'ConsentType',
'ConsentStatus',
'DataProcessingRecord',
'ProcessingCategory',
'LegalBasis',
'DataBreach',
'BreachType',
'BreachStatus',
'RetentionRule',
'DataRetentionLog',
]

View File

@@ -0,0 +1,64 @@
"""
GDPR Consent Management Model.
"""
from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Enum, JSON, Boolean
from sqlalchemy.orm import relationship
from datetime import datetime
import enum
from ...shared.config.database import Base
class ConsentType(str, enum.Enum):
"""Types of consent that can be given or withdrawn."""
marketing = "marketing"
analytics = "analytics"
necessary = "necessary"
preferences = "preferences"
third_party_sharing = "third_party_sharing"
profiling = "profiling"
automated_decision_making = "automated_decision_making"
class ConsentStatus(str, enum.Enum):
"""Status of consent."""
granted = "granted"
withdrawn = "withdrawn"
pending = "pending"
expired = "expired"
class Consent(Base):
"""Model for tracking user consent for GDPR compliance."""
__tablename__ = 'consents'
id = Column(Integer, primary_key=True, index=True, autoincrement=True)
user_id = Column(Integer, ForeignKey('users.id'), nullable=True, index=True) # Nullable for anonymous users
user_email = Column(String(255), nullable=True, index=True) # Email for anonymous users
is_anonymous = Column(Boolean, default=False, nullable=False, index=True) # Flag for anonymous consent
consent_type = Column(Enum(ConsentType), nullable=False, index=True)
status = Column(Enum(ConsentStatus), default=ConsentStatus.granted, nullable=False, index=True)
# Consent details
granted_at = Column(DateTime, nullable=True)
withdrawn_at = Column(DateTime, nullable=True)
expires_at = Column(DateTime, nullable=True) # For time-limited consent
# Legal basis (Article 6 GDPR)
legal_basis = Column(String(100), nullable=True) # consent, contract, legal_obligation, vital_interests, public_task, legitimate_interests
# Consent method
consent_method = Column(String(50), nullable=True) # explicit, implicit, pre_checked
consent_version = Column(String(20), nullable=True) # Version of privacy policy when consent was given
# Metadata
ip_address = Column(String(45), nullable=True)
user_agent = Column(String(255), nullable=True)
source = Column(String(100), nullable=True) # Where consent was given (registration, cookie_banner, etc.)
# Additional data
extra_metadata = Column(JSON, nullable=True)
# Timestamps
created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)
updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
# Relationships
user = relationship('User', foreign_keys=[user_id])
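A usage sketch for the model above, assuming an open SQLAlchemy Session obtained from the project's get_db dependency; the consent_version and source values are illustrative:

from datetime import datetime
from typing import Optional
from sqlalchemy.orm import Session

def grant_marketing_consent(db: Session, user_id: int, ip_address: Optional[str] = None) -> Consent:
    """Record an explicit, granted marketing consent for a registered user."""
    consent = Consent(
        user_id=user_id,
        is_anonymous=False,
        consent_type=ConsentType.marketing,
        status=ConsentStatus.granted,
        granted_at=datetime.utcnow(),
        legal_basis='consent',       # Article 6(1)(a) GDPR
        consent_method='explicit',
        consent_version='1.0',       # illustrative privacy-policy version
        ip_address=ip_address,
        source='cookie_banner',
    )
    db.add(consent)
    db.commit()
    db.refresh(consent)
    return consent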

View File

@@ -0,0 +1,70 @@
"""
GDPR Data Breach Notification Model (Article 33-34 GDPR).
"""
from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Enum, JSON, Boolean
from sqlalchemy.orm import relationship
from datetime import datetime
import enum
from ...shared.config.database import Base
class BreachType(str, enum.Enum):
"""Types of data breaches."""
confidentiality = "confidentiality" # Unauthorized disclosure
integrity = "integrity" # Unauthorized alteration
availability = "availability" # Unauthorized destruction or loss
class BreachStatus(str, enum.Enum):
"""Status of breach notification."""
detected = "detected"
investigating = "investigating"
contained = "contained"
reported_to_authority = "reported_to_authority"
notified_data_subjects = "notified_data_subjects"
resolved = "resolved"
class DataBreach(Base):
"""Data breach notification record (Articles 33-34 GDPR)."""
__tablename__ = 'data_breaches'
id = Column(Integer, primary_key=True, index=True, autoincrement=True)
# Breach details
breach_type = Column(Enum(BreachType), nullable=False, index=True)
status = Column(Enum(BreachStatus), default=BreachStatus.detected, nullable=False, index=True)
# Description
description = Column(Text, nullable=False) # Nature of the breach
affected_data_categories = Column(JSON, nullable=True) # Categories of personal data affected
affected_data_subjects = Column(JSON, nullable=True) # Approximate number of affected individuals
# Timeline
detected_at = Column(DateTime, nullable=False, index=True)
occurred_at = Column(DateTime, nullable=True) # When breach occurred (if known)
contained_at = Column(DateTime, nullable=True)
# Notification
reported_to_authority_at = Column(DateTime, nullable=True) # Article 33 - 72 hours
authority_reference = Column(String(255), nullable=True) # Reference from supervisory authority
notified_data_subjects_at = Column(DateTime, nullable=True) # Article 34 - without undue delay
notification_method = Column(String(100), nullable=True) # email, public_notice, etc.
# Risk assessment
likely_consequences = Column(Text, nullable=True)
measures_proposed = Column(Text, nullable=True) # Measures to address the breach
risk_level = Column(String(20), nullable=True) # low, medium, high
# Reporting
reported_by = Column(Integer, ForeignKey('users.id'), nullable=False) # Who detected/reported
investigated_by = Column(Integer, ForeignKey('users.id'), nullable=True) # DPO or responsible person
# Additional details
extra_metadata = Column(JSON, nullable=True)
# Timestamps
created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)
updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
# Relationships
reporter = relationship('User', foreign_keys=[reported_by])
investigator = relationship('User', foreign_keys=[investigated_by])

View File

@@ -0,0 +1,78 @@
"""
GDPR Data Processing Records Model (Article 30 GDPR).
"""
from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Enum, JSON, Boolean
from sqlalchemy.orm import relationship
from datetime import datetime
import enum
from ...shared.config.database import Base
class ProcessingCategory(str, enum.Enum):
"""Categories of data processing."""
collection = "collection"
storage = "storage"
usage = "usage"
sharing = "sharing"
deletion = "deletion"
anonymization = "anonymization"
transfer = "transfer"
class LegalBasis(str, enum.Enum):
"""Legal basis for processing (Article 6 GDPR)."""
consent = "consent"
contract = "contract"
legal_obligation = "legal_obligation"
vital_interests = "vital_interests"
public_task = "public_task"
legitimate_interests = "legitimate_interests"
class DataProcessingRecord(Base):
"""Record of data processing activities (Article 30 GDPR requirement)."""
__tablename__ = 'data_processing_records'
id = Column(Integer, primary_key=True, index=True, autoincrement=True)
# Processing details
processing_category = Column(Enum(ProcessingCategory), nullable=False, index=True)
legal_basis = Column(Enum(LegalBasis), nullable=False, index=True)
purpose = Column(Text, nullable=False) # Purpose of processing
# Data categories
data_categories = Column(JSON, nullable=True) # List of data categories processed
data_subjects = Column(JSON, nullable=True) # Categories of data subjects
# Recipients
recipients = Column(JSON, nullable=True) # Categories of recipients (internal, third_party, etc.)
third_parties = Column(JSON, nullable=True) # Specific third parties if any
# Transfers
transfers_to_third_countries = Column(Boolean, default=False, nullable=False)
transfer_countries = Column(JSON, nullable=True) # List of countries
safeguards = Column(Text, nullable=True) # Safeguards for transfers
# Retention
retention_period = Column(String(100), nullable=True) # How long data is retained
retention_criteria = Column(Text, nullable=True) # Criteria for determining retention period
# Security measures
security_measures = Column(Text, nullable=True)
# Related entities
user_id = Column(Integer, ForeignKey('users.id'), nullable=True, index=True) # If specific to a user
related_booking_id = Column(Integer, nullable=True, index=True)
related_payment_id = Column(Integer, nullable=True, index=True)
# Processing details
processed_by = Column(Integer, ForeignKey('users.id'), nullable=True) # Staff/admin who processed
processing_timestamp = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)
# Additional metadata
extra_metadata = Column(JSON, nullable=True)
# Timestamps
created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)
# Relationships
user = relationship('User', foreign_keys=[user_id])
processor = relationship('User', foreign_keys=[processed_by])

View File

@@ -0,0 +1,75 @@
"""
GDPR Data Retention Policy Model.
"""
from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Enum, JSON, Boolean
from sqlalchemy.orm import relationship
from datetime import datetime, timedelta
import enum
from ...shared.config.database import Base
class RetentionRule(Base):
"""Data retention rules for different data types."""
__tablename__ = 'retention_rules'
id = Column(Integer, primary_key=True, index=True, autoincrement=True)
# Rule details
data_category = Column(String(100), nullable=False, unique=True, index=True) # user_data, booking_data, payment_data, etc.
retention_period_days = Column(Integer, nullable=False) # Number of days to retain
retention_period_months = Column(Integer, nullable=True) # Alternative: months
retention_period_years = Column(Integer, nullable=True) # Alternative: years
# Legal basis
legal_basis = Column(Text, nullable=True) # Why we retain for this period
legal_requirement = Column(Text, nullable=True) # Specific legal requirement if any
# Action after retention
action_after_retention = Column(String(50), nullable=False, default='anonymize') # delete, anonymize, archive
# Conditions
conditions = Column(JSON, nullable=True) # Additional conditions (e.g., active bookings)
# Status
is_active = Column(Boolean, default=True, nullable=False, index=True)
# Metadata
description = Column(Text, nullable=True)
created_by = Column(Integer, ForeignKey('users.id'), nullable=True)
# Timestamps
created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
# Relationships
creator = relationship('User', foreign_keys=[created_by])
class DataRetentionLog(Base):
"""Log of data retention actions performed."""
__tablename__ = 'data_retention_logs'
id = Column(Integer, primary_key=True, index=True, autoincrement=True)
# Retention action
retention_rule_id = Column(Integer, ForeignKey('retention_rules.id'), nullable=False, index=True)
data_category = Column(String(100), nullable=False, index=True)
action_taken = Column(String(50), nullable=False) # deleted, anonymized, archived
# Affected records
records_affected = Column(Integer, nullable=False, default=0)
affected_ids = Column(JSON, nullable=True) # IDs of affected records (for audit)
# Execution
executed_by = Column(Integer, ForeignKey('users.id'), nullable=True) # System or admin
executed_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)
# Results
success = Column(Boolean, default=True, nullable=False)
error_message = Column(Text, nullable=True)
# Metadata
extra_metadata = Column(JSON, nullable=True)
# Relationships
retention_rule = relationship('RetentionRule', foreign_keys=[retention_rule_id])
executor = relationship('User', foreign_keys=[executed_by])
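A minimal sketch of how a RetentionRule's day-based period can be applied, assuming the period lives in retention_period_days as defined above (the optional month/year columns are ignored here):

from datetime import datetime, timedelta

def is_past_retention(created_at: datetime, retention_period_days: int) -> bool:
    """True when a record created at created_at has outlived its retention window."""
    return datetime.utcnow() - created_at > timedelta(days=retention_period_days)

# Example: booking data retained for roughly three years
print(is_past_retention(datetime(2020, 1, 1), retention_period_days=3 * 365))  # True as of 2025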

View File

@@ -1,7 +1,7 @@
"""
GDPR compliance models for data export and deletion requests.
"""
from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Enum, JSON
from sqlalchemy import Column, Integer, String, Text, DateTime, ForeignKey, Enum, JSON, Boolean
from sqlalchemy.orm import relationship
from datetime import datetime
import enum
@@ -27,9 +27,10 @@ class GDPRRequest(Base):
request_type = Column(Enum(GDPRRequestType), nullable=False, index=True)
status = Column(Enum(GDPRRequestStatus), default=GDPRRequestStatus.pending, nullable=False, index=True)
# User making the request
user_id = Column(Integer, ForeignKey('users.id'), nullable=False, index=True)
user_email = Column(String(255), nullable=False) # Store email even if user is deleted
# User making the request (nullable for anonymous users)
user_id = Column(Integer, ForeignKey('users.id'), nullable=True, index=True)
user_email = Column(String(255), nullable=False) # Required: email for anonymous or registered users
is_anonymous = Column(Boolean, default=False, nullable=False, index=True) # Flag for anonymous requests
# Request details
request_data = Column(JSON, nullable=True) # Additional request parameters

View File

@@ -0,0 +1,340 @@
"""
Admin routes for GDPR compliance management.
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Body
from sqlalchemy.orm import Session
from typing import Optional, Dict, Any, List
from pydantic import BaseModel
from datetime import datetime
from ...shared.config.database import get_db
from ...shared.config.logging_config import get_logger
from ...security.middleware.auth import authorize_roles
from ...auth.models.user import User
from ..services.breach_service import breach_service
from ..services.retention_service import retention_service
from ..services.data_processing_service import data_processing_service
from ..models.data_breach import BreachType, BreachStatus
from ...shared.utils.response_helpers import success_response
logger = get_logger(__name__)
router = APIRouter(prefix='/gdpr/admin', tags=['gdpr-admin'])
# Data Breach Management
class BreachCreateRequest(BaseModel):
breach_type: str
description: str
affected_data_categories: Optional[List[str]] = None
affected_data_subjects: Optional[int] = None
occurred_at: Optional[str] = None
likely_consequences: Optional[str] = None
measures_proposed: Optional[str] = None
risk_level: Optional[str] = None
@router.post('/breaches')
async def create_breach(
breach_data: BreachCreateRequest,
current_user: User = Depends(authorize_roles('admin')),
db: Session = Depends(get_db)
):
"""Create a data breach record (admin only)."""
try:
try:
breach_type_enum = BreachType(breach_data.breach_type)
except ValueError:
raise HTTPException(status_code=400, detail=f'Invalid breach type: {breach_data.breach_type}')
occurred_at = None
if breach_data.occurred_at:
occurred_at = datetime.fromisoformat(breach_data.occurred_at.replace('Z', '+00:00'))
breach = await breach_service.create_breach(
db=db,
breach_type=breach_type_enum,
description=breach_data.description,
reported_by=current_user.id,
affected_data_categories=breach_data.affected_data_categories,
affected_data_subjects=breach_data.affected_data_subjects,
occurred_at=occurred_at,
likely_consequences=breach_data.likely_consequences,
measures_proposed=breach_data.measures_proposed,
risk_level=breach_data.risk_level
)
return success_response(
data={
'breach_id': breach.id,
'status': breach.status.value,
'detected_at': breach.detected_at.isoformat()
},
message='Data breach record created'
)
except HTTPException:
raise
except Exception as e:
logger.error(f'Error creating breach: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
@router.get('/breaches')
async def get_breaches(
status: Optional[str] = Query(None),
page: int = Query(1, ge=1),
limit: int = Query(20, ge=1, le=100),
current_user: User = Depends(authorize_roles('admin')),
db: Session = Depends(get_db)
):
"""Get all data breaches (admin only)."""
try:
status_enum = None
if status:
try:
status_enum = BreachStatus(status)
except ValueError:
raise HTTPException(status_code=400, detail=f'Invalid status: {status}')
offset = (page - 1) * limit
breaches = breach_service.get_breaches(
db=db,
status=status_enum,
limit=limit,
offset=offset
)
return success_response(data={
'breaches': [{
'id': breach.id,
'breach_type': breach.breach_type.value,
'status': breach.status.value,
'description': breach.description,
'risk_level': breach.risk_level,
'detected_at': breach.detected_at.isoformat(),
'reported_to_authority_at': breach.reported_to_authority_at.isoformat() if breach.reported_to_authority_at else None,
'notified_data_subjects_at': breach.notified_data_subjects_at.isoformat() if breach.notified_data_subjects_at else None,
} for breach in breaches]
})
except HTTPException:
raise
except Exception as e:
logger.error(f'Error getting breaches: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
@router.post('/breaches/{breach_id}/report-authority')
async def report_breach_to_authority(
breach_id: int,
authority_reference: str = Body(...),
current_user: User = Depends(authorize_roles('admin')),
db: Session = Depends(get_db)
):
"""Report breach to supervisory authority (admin only)."""
try:
breach = await breach_service.report_to_authority(
db=db,
breach_id=breach_id,
authority_reference=authority_reference,
reported_by=current_user.id
)
return success_response(
data={
'breach_id': breach.id,
'authority_reference': breach.authority_reference,
'reported_at': breach.reported_to_authority_at.isoformat()
},
message='Breach reported to supervisory authority'
)
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error(f'Error reporting breach: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
@router.post('/breaches/{breach_id}/notify-subjects')
async def notify_data_subjects(
breach_id: int,
notification_method: str = Body(...),
current_user: User = Depends(authorize_roles('admin')),
db: Session = Depends(get_db)
):
"""Notify affected data subjects (admin only)."""
try:
breach = await breach_service.notify_data_subjects(
db=db,
breach_id=breach_id,
notification_method=notification_method,
notified_by=current_user.id
)
return success_response(
data={
'breach_id': breach.id,
'notification_method': breach.notification_method,
'notified_at': breach.notified_data_subjects_at.isoformat()
},
message='Data subjects notified'
)
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error(f'Error notifying subjects: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
# Data Retention Management
class RetentionRuleCreateRequest(BaseModel):
data_category: str
retention_period_days: int
retention_period_months: Optional[int] = None
retention_period_years: Optional[int] = None
legal_basis: Optional[str] = None
legal_requirement: Optional[str] = None
action_after_retention: str = 'anonymize'
conditions: Optional[Dict[str, Any]] = None
description: Optional[str] = None
@router.post('/retention-rules')
async def create_retention_rule(
rule_data: RetentionRuleCreateRequest,
current_user: User = Depends(authorize_roles('admin')),
db: Session = Depends(get_db)
):
"""Create a data retention rule (admin only)."""
try:
rule = retention_service.create_retention_rule(
db=db,
data_category=rule_data.data_category,
retention_period_days=rule_data.retention_period_days,
retention_period_months=rule_data.retention_period_months,
retention_period_years=rule_data.retention_period_years,
legal_basis=rule_data.legal_basis,
legal_requirement=rule_data.legal_requirement,
action_after_retention=rule_data.action_after_retention,
conditions=rule_data.conditions,
description=rule_data.description,
created_by=current_user.id
)
return success_response(
data={
'rule_id': rule.id,
'data_category': rule.data_category,
'retention_period_days': rule.retention_period_days
},
message='Retention rule created successfully'
)
except Exception as e:
logger.error(f'Error creating retention rule: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
@router.get('/retention-rules')
async def get_retention_rules(
is_active: Optional[bool] = Query(None),
current_user: User = Depends(authorize_roles('admin')),
db: Session = Depends(get_db)
):
"""Get retention rules (admin only)."""
try:
rules = retention_service.get_retention_rules(db=db, is_active=is_active)
return success_response(data={
'rules': [{
'id': rule.id,
'data_category': rule.data_category,
'retention_period_days': rule.retention_period_days,
'action_after_retention': rule.action_after_retention,
'is_active': rule.is_active,
'legal_basis': rule.legal_basis
} for rule in rules]
})
except Exception as e:
logger.error(f'Error getting retention rules: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
@router.get('/retention-logs')
async def get_retention_logs(
data_category: Optional[str] = Query(None),
page: int = Query(1, ge=1),
limit: int = Query(50, ge=1, le=100),
current_user: User = Depends(authorize_roles('admin')),
db: Session = Depends(get_db)
):
"""Get retention action logs (admin only)."""
try:
offset = (page - 1) * limit
logs = retention_service.get_retention_logs(
db=db,
data_category=data_category,
limit=limit,
offset=offset
)
return success_response(data={
'logs': [{
'id': log.id,
'data_category': log.data_category,
'action_taken': log.action_taken,
'records_affected': log.records_affected,
'executed_at': log.executed_at.isoformat(),
'success': log.success
} for log in logs]
})
except Exception as e:
logger.error(f'Error getting retention logs: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
# Data Processing Records (Admin View)
@router.get('/processing-records')
async def get_all_processing_records(
user_id: Optional[int] = Query(None),
processing_category: Optional[str] = Query(None),
legal_basis: Optional[str] = Query(None),
page: int = Query(1, ge=1),
limit: int = Query(50, ge=1, le=100),
current_user: User = Depends(authorize_roles('admin')),
db: Session = Depends(get_db)
):
"""Get all data processing records (admin only)."""
try:
from ..models.data_processing_record import ProcessingCategory, LegalBasis
category_enum = None
if processing_category:
try:
category_enum = ProcessingCategory(processing_category)
except ValueError:
raise HTTPException(status_code=400, detail=f'Invalid processing category: {processing_category}')
basis_enum = None
if legal_basis:
try:
basis_enum = LegalBasis(legal_basis)
except ValueError:
raise HTTPException(status_code=400, detail=f'Invalid legal basis: {legal_basis}')
offset = (page - 1) * limit
records = data_processing_service.get_processing_records(
db=db,
user_id=user_id,
processing_category=category_enum,
legal_basis=basis_enum,
limit=limit,
offset=offset
)
return success_response(data={
'records': [{
'id': record.id,
'processing_category': record.processing_category.value,
'legal_basis': record.legal_basis.value,
'purpose': record.purpose,
'processing_timestamp': record.processing_timestamp.isoformat(),
'user_id': record.user_id
} for record in records]
})
except HTTPException:
raise
except Exception as e:
logger.error(f'Error getting processing records: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
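A hedged usage sketch for the breach endpoint above; the https://example.com/api base URL and the cookie-based admin authentication are assumptions about how the router is mounted, and the payload values are illustrative:

import requests

payload = {
    'breach_type': 'confidentiality',            # must be a valid BreachType value
    'description': 'Backup bucket was publicly readable for roughly two hours',
    'affected_data_categories': ['email', 'booking_history'],
    'affected_data_subjects': 1200,
    'risk_level': 'high',
}
resp = requests.post(
    'https://example.com/api/gdpr/admin/breaches',   # assumed mount point
    json=payload,
    cookies={'accessToken': '<admin access token>'},
    timeout=10,
)
resp.raise_for_status()
print(resp.json())   # expected: success_response payload with breach_id, status, detected_at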

View File

@@ -3,46 +3,78 @@ GDPR compliance routes for data export and deletion.
"""
from fastapi import APIRouter, Depends, HTTPException, Query, Response
from sqlalchemy.orm import Session, noload
from sqlalchemy import or_
from typing import Optional
from datetime import datetime
from ...shared.config.database import get_db
from ...shared.config.logging_config import get_logger
from ...security.middleware.auth import get_current_user, authorize_roles
from ...security.middleware.auth import get_current_user, authorize_roles, get_current_user_optional
from ...auth.models.user import User
from ..services.gdpr_service import gdpr_service
from ..services.consent_service import consent_service
from ..services.data_processing_service import data_processing_service
from ..models.gdpr_request import GDPRRequest, GDPRRequestType, GDPRRequestStatus
from ..models.consent import ConsentType, ConsentStatus
from ...shared.utils.response_helpers import success_response
from fastapi import Request
from pydantic import BaseModel
from typing import Dict, Any, Optional, List
logger = get_logger(__name__)
router = APIRouter(prefix='/gdpr', tags=['gdpr'])
class AnonymousExportRequest(BaseModel):
email: str
@router.post('/export')
async def request_data_export(
request: Request,
current_user: User = Depends(get_current_user),
anonymous_request: Optional[AnonymousExportRequest] = None,
current_user: Optional[User] = Depends(get_current_user_optional),
db: Session = Depends(get_db)
):
"""Request export of user's personal data (GDPR)."""
"""Request export of user's personal data (GDPR) - supports both authenticated and anonymous users."""
try:
client_ip = request.client.host if request.client else None
user_agent = request.headers.get('User-Agent')
gdpr_request = await gdpr_service.create_data_export_request(
db=db,
user_id=current_user.id,
ip_address=client_ip,
user_agent=user_agent
)
# Check if authenticated or anonymous
if current_user:
# Authenticated user
gdpr_request = await gdpr_service.create_data_export_request(
db=db,
user_id=current_user.id,
ip_address=client_ip,
user_agent=user_agent,
is_anonymous=False
)
elif anonymous_request and anonymous_request.email:
# Anonymous user - requires email
gdpr_request = await gdpr_service.create_data_export_request(
db=db,
user_email=anonymous_request.email,
ip_address=client_ip,
user_agent=user_agent,
is_anonymous=True
)
else:
raise HTTPException(
status_code=400,
detail='Either authentication required or email must be provided for anonymous requests'
)
return success_response(
data={
'request_id': gdpr_request.id,
'verification_token': gdpr_request.verification_token,
'status': gdpr_request.status.value,
'expires_at': gdpr_request.expires_at.isoformat() if gdpr_request.expires_at else None
'expires_at': gdpr_request.expires_at.isoformat() if gdpr_request.expires_at else None,
'is_anonymous': gdpr_request.is_anonymous
},
message='Data export request created. You will receive an email with download link once ready.'
)
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
logger.error(f'Error creating data export request: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
@@ -51,20 +83,26 @@ async def request_data_export(
async def get_export_data(
request_id: int,
verification_token: str = Query(...),
current_user: User = Depends(get_current_user),
current_user: Optional[User] = Depends(get_current_user_optional),
db: Session = Depends(get_db)
):
"""Get exported user data."""
"""Get exported user data - supports both authenticated and anonymous users via verification token."""
try:
gdpr_request = db.query(GDPRRequest).options(
# Build query - verification token is required for both authenticated and anonymous
query = db.query(GDPRRequest).options(
noload(GDPRRequest.user),
noload(GDPRRequest.processor)
).filter(
GDPRRequest.id == request_id,
GDPRRequest.user_id == current_user.id,
GDPRRequest.verification_token == verification_token,
GDPRRequest.request_type == GDPRRequestType.data_export
).first()
)
# For authenticated users, also verify user_id matches
if current_user:
query = query.filter(GDPRRequest.user_id == current_user.id)
gdpr_request = query.first()
if not gdpr_request:
raise HTTPException(status_code=404, detail='Export request not found or invalid token')
@@ -73,8 +111,10 @@ async def get_export_data(
# Process export
export_data = await gdpr_service.export_user_data(
db=db,
user_id=current_user.id,
request_id=request_id
user_id=gdpr_request.user_id,
user_email=gdpr_request.user_email,
request_id=request_id,
is_anonymous=gdpr_request.is_anonymous
)
return success_response(data=export_data)
elif gdpr_request.status == GDPRRequestStatus.completed and gdpr_request.export_file_path:
@@ -97,32 +137,57 @@ async def get_export_data(
logger.error(f'Error getting export data: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
class AnonymousDeletionRequest(BaseModel):
email: str
@router.post('/delete')
async def request_data_deletion(
request: Request,
current_user: User = Depends(get_current_user),
anonymous_request: Optional[AnonymousDeletionRequest] = None,
current_user: Optional[User] = Depends(get_current_user_optional),
db: Session = Depends(get_db)
):
"""Request deletion of user's personal data (GDPR - Right to be Forgotten)."""
"""Request deletion of user's personal data (GDPR - Right to be Forgotten) - supports anonymous users."""
try:
client_ip = request.client.host if request.client else None
user_agent = request.headers.get('User-Agent')
gdpr_request = await gdpr_service.create_data_deletion_request(
db=db,
user_id=current_user.id,
ip_address=client_ip,
user_agent=user_agent
)
# Check if authenticated or anonymous
if current_user:
# Authenticated user
gdpr_request = await gdpr_service.create_data_deletion_request(
db=db,
user_id=current_user.id,
ip_address=client_ip,
user_agent=user_agent,
is_anonymous=False
)
elif anonymous_request and anonymous_request.email:
# Anonymous user - requires email
gdpr_request = await gdpr_service.create_data_deletion_request(
db=db,
user_email=anonymous_request.email,
ip_address=client_ip,
user_agent=user_agent,
is_anonymous=True
)
else:
raise HTTPException(
status_code=400,
detail='Either authentication required or email must be provided for anonymous requests'
)
return success_response(
data={
'request_id': gdpr_request.id,
'verification_token': gdpr_request.verification_token,
'status': gdpr_request.status.value
'status': gdpr_request.status.value,
'is_anonymous': gdpr_request.is_anonymous
},
message='Data deletion request created. Please verify via email to proceed.'
)
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
logger.error(f'Error creating data deletion request: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
@@ -131,21 +196,27 @@ async def request_data_deletion(
async def confirm_data_deletion(
request_id: int,
verification_token: str = Query(...),
current_user: User = Depends(get_current_user),
current_user: Optional[User] = Depends(get_current_user_optional),
db: Session = Depends(get_db)
):
"""Confirm and process data deletion request."""
"""Confirm and process data deletion request - supports anonymous users via verification token."""
try:
gdpr_request = db.query(GDPRRequest).options(
# Build query - verification token is required for both authenticated and anonymous
query = db.query(GDPRRequest).options(
noload(GDPRRequest.user),
noload(GDPRRequest.processor)
).filter(
GDPRRequest.id == request_id,
GDPRRequest.user_id == current_user.id,
GDPRRequest.verification_token == verification_token,
GDPRRequest.request_type == GDPRRequestType.data_deletion,
GDPRRequest.status == GDPRRequestStatus.pending
).first()
)
# For authenticated users, also verify user_id matches
if current_user:
query = query.filter(GDPRRequest.user_id == current_user.id)
gdpr_request = query.first()
if not gdpr_request:
raise HTTPException(status_code=404, detail='Deletion request not found or already processed')
@@ -153,14 +224,16 @@ async def confirm_data_deletion(
# Process deletion
deletion_log = await gdpr_service.delete_user_data(
db=db,
user_id=current_user.id,
user_id=gdpr_request.user_id,
user_email=gdpr_request.user_email,
request_id=request_id,
processed_by=current_user.id
processed_by=current_user.id if current_user else None,
is_anonymous=gdpr_request.is_anonymous
)
return success_response(
data=deletion_log,
message='Your data has been deleted successfully.'
message=deletion_log.get('summary', {}).get('message', 'Your data has been deleted successfully.')
)
except HTTPException:
raise
@@ -173,13 +246,17 @@ async def get_user_gdpr_requests(
current_user: User = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""Get user's GDPR requests."""
"""Get user's GDPR requests (both authenticated and anonymous requests by email)."""
try:
# Get requests by user_id (authenticated) or by email (includes anonymous)
requests = db.query(GDPRRequest).options(
noload(GDPRRequest.user),
noload(GDPRRequest.processor)
).filter(
GDPRRequest.user_id == current_user.id
or_(
GDPRRequest.user_id == current_user.id,
GDPRRequest.user_email == current_user.email
)
).order_by(GDPRRequest.created_at.desc()).all()
return success_response(data={
@@ -187,6 +264,7 @@ async def get_user_gdpr_requests(
'id': req.id,
'request_type': req.request_type.value,
'status': req.status.value,
'is_anonymous': req.is_anonymous,
'created_at': req.created_at.isoformat() if req.created_at else None,
'processed_at': req.processed_at.isoformat() if req.processed_at else None,
} for req in requests]
@@ -270,3 +348,272 @@ async def delete_gdpr_request(
logger.error(f'Error deleting GDPR request: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
# GDPR Rights - Additional Routes
class DataRectificationRequest(BaseModel):
corrections: Dict[str, Any] # e.g., {"full_name": "New Name", "email": "new@email.com"}
@router.post('/rectify')
async def request_data_rectification(
request: Request,
rectification_data: DataRectificationRequest,
current_user: User = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""Request data rectification (Article 16 GDPR - Right to rectification)."""
try:
client_ip = request.client.host if request.client else None
user_agent = request.headers.get('User-Agent')
gdpr_request = await gdpr_service.request_data_rectification(
db=db,
user_id=current_user.id,
corrections=rectification_data.corrections,
ip_address=client_ip,
user_agent=user_agent
)
return success_response(
data={
'request_id': gdpr_request.id,
'verification_token': gdpr_request.verification_token,
'status': gdpr_request.status.value
},
message='Data rectification request created. An admin will review and process your request.'
)
except Exception as e:
logger.error(f'Error creating rectification request: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
class ProcessingRestrictionRequest(BaseModel):
reason: str
@router.post('/restrict')
async def request_processing_restriction(
request: Request,
restriction_data: ProcessingRestrictionRequest,
current_user: User = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""Request restriction of processing (Article 18 GDPR)."""
try:
client_ip = request.client.host if request.client else None
user_agent = request.headers.get('User-Agent')
gdpr_request = await gdpr_service.request_processing_restriction(
db=db,
user_id=current_user.id,
reason=restriction_data.reason,
ip_address=client_ip,
user_agent=user_agent
)
return success_response(
data={
'request_id': gdpr_request.id,
'verification_token': gdpr_request.verification_token,
'status': gdpr_request.status.value
},
message='Processing restriction request created. Your account has been temporarily restricted.'
)
except Exception as e:
logger.error(f'Error creating restriction request: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
class ProcessingObjectionRequest(BaseModel):
processing_purpose: str
reason: Optional[str] = None
@router.post('/object')
async def request_processing_objection(
request: Request,
objection_data: ProcessingObjectionRequest,
current_user: User = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""Object to processing (Article 21 GDPR - Right to object)."""
try:
client_ip = request.client.host if request.client else None
user_agent = request.headers.get('User-Agent')
gdpr_request = await gdpr_service.request_processing_objection(
db=db,
user_id=current_user.id,
processing_purpose=objection_data.processing_purpose,
reason=objection_data.reason,
ip_address=client_ip,
user_agent=user_agent
)
return success_response(
data={
'request_id': gdpr_request.id,
'verification_token': gdpr_request.verification_token,
'status': gdpr_request.status.value
},
message='Processing objection registered. We will review your objection and stop processing for the specified purpose if valid.'
)
except Exception as e:
logger.error(f'Error creating objection request: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
# Consent Management Routes
class ConsentUpdateRequest(BaseModel):
consents: Dict[str, bool] # e.g., {"marketing": true, "analytics": false}
@router.get('/consents')
async def get_user_consents(
current_user: User = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""Get user's consent status for all consent types."""
try:
consents = consent_service.get_user_consents(db=db, user_id=current_user.id, include_withdrawn=True)
consent_status = {}
for consent_type in ConsentType:
consent_status[consent_type.value] = {
'has_consent': consent_service.has_consent(db=db, user_id=current_user.id, consent_type=consent_type),
'granted_at': None,
'withdrawn_at': None,
'status': 'none'
}
for consent in consents:
consent_status[consent.consent_type.value] = {
'has_consent': consent.status == ConsentStatus.granted and (not consent.expires_at or consent.expires_at > datetime.utcnow()),
'granted_at': consent.granted_at.isoformat() if consent.granted_at else None,
'withdrawn_at': consent.withdrawn_at.isoformat() if consent.withdrawn_at else None,
'status': consent.status.value,
'expires_at': consent.expires_at.isoformat() if consent.expires_at else None
}
return success_response(data={'consents': consent_status})
except Exception as e:
logger.error(f'Error getting consents: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
@router.post('/consents')
async def update_consents(
request: Request,
consent_data: ConsentUpdateRequest,
current_user: User = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""Update user consent preferences."""
try:
client_ip = request.client.host if request.client else None
user_agent = request.headers.get('User-Agent')
# Convert string keys to ConsentType enum
consents_dict = {}
for key, value in consent_data.consents.items():
try:
consent_type = ConsentType(key)
consents_dict[consent_type] = value
except ValueError:
continue
results = await consent_service.update_consent_preferences(
db=db,
user_id=current_user.id,
consents=consents_dict,
legal_basis='consent',
ip_address=client_ip,
user_agent=user_agent,
source='gdpr_page'
)
return success_response(
data={'updated_consents': len(results)},
message='Consent preferences updated successfully'
)
except Exception as e:
logger.error(f'Error updating consents: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
@router.post('/consents/{consent_type}/withdraw')
async def withdraw_consent(
request: Request,
consent_type: str,
current_user: User = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""Withdraw specific consent (Article 7(3) GDPR)."""
try:
client_ip = request.client.host if request.client else None
user_agent = request.headers.get('User-Agent')
try:
consent_type_enum = ConsentType(consent_type)
except ValueError:
raise HTTPException(status_code=400, detail=f'Invalid consent type: {consent_type}')
consent = await consent_service.withdraw_consent(
db=db,
user_id=current_user.id,
consent_type=consent_type_enum,
ip_address=client_ip,
user_agent=user_agent
)
return success_response(
data={
'consent_id': consent.id,
'consent_type': consent.consent_type.value,
'withdrawn_at': consent.withdrawn_at.isoformat() if consent.withdrawn_at else None
},
message=f'Consent for {consent_type} withdrawn successfully'
)
except HTTPException:
raise
except Exception as e:
logger.error(f'Error withdrawing consent: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
# Data Processing Records (User View)
@router.get('/processing-records')
async def get_user_processing_records(
current_user: User = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""Get data processing records for the user (Article 15 GDPR - Right of access)."""
try:
summary = data_processing_service.get_user_processing_summary(
db=db,
user_id=current_user.id
)
return success_response(data=summary)
except Exception as e:
logger.error(f'Error getting processing records: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
# Admin Routes for Processing Requests
@router.post('/admin/rectify/{request_id}/process')
async def process_rectification(
request_id: int,
current_user: User = Depends(authorize_roles('admin')),
db: Session = Depends(get_db)
):
"""Process data rectification request (admin only)."""
try:
result = await gdpr_service.process_data_rectification(
db=db,
request_id=request_id,
processed_by=current_user.id
)
return success_response(
data=result,
message='Data rectification processed successfully'
)
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error(f'Error processing rectification: {str(e)}', exc_info=True)
raise HTTPException(status_code=500, detail=str(e))
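A hedged sketch of the anonymous export flow enabled above: create the request with just an email, then fetch the export using the returned verification token. The base URL, the /api prefix, the GET path, and the success_response envelope shape are assumptions:

import requests

BASE = 'https://example.com/api/gdpr'   # assumed mount point

# 1. Create the export request without logging in (AnonymousExportRequest body)
created = requests.post(f'{BASE}/export', json={'email': 'guest@example.com'}, timeout=10)
created.raise_for_status()
data = created.json()['data']           # assumed success_response envelope

# 2. Retrieve the export once processed; the verification token alone authorizes the call
export = requests.get(
    f"{BASE}/export/{data['request_id']}",   # assumed route path for get_export_data
    params={'verification_token': data['verification_token']},
    timeout=10,
)
print(export.status_code, export.json())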

View File

@@ -0,0 +1,169 @@
"""
Data Breach Notification Service (Articles 33-34 GDPR).
"""
from sqlalchemy.orm import Session
from typing import Dict, Any, Optional, List
from datetime import datetime, timedelta
from ..models.data_breach import DataBreach, BreachType, BreachStatus
from ...shared.config.logging_config import get_logger
from ...analytics.services.audit_service import audit_service
logger = get_logger(__name__)
class BreachService:
"""Service for managing data breach notifications (Articles 33-34 GDPR)."""
NOTIFICATION_DEADLINE_HOURS = 72 # Article 33 - 72 hours to notify authority
@staticmethod
async def create_breach(
db: Session,
breach_type: BreachType,
description: str,
reported_by: int,
affected_data_categories: Optional[List[str]] = None,
affected_data_subjects: Optional[int] = None,
occurred_at: Optional[datetime] = None,
likely_consequences: Optional[str] = None,
measures_proposed: Optional[str] = None,
risk_level: Optional[str] = None,
extra_metadata: Optional[Dict[str, Any]] = None
) -> DataBreach:
"""Create a data breach record."""
breach = DataBreach(
breach_type=breach_type,
status=BreachStatus.detected,
description=description,
affected_data_categories=affected_data_categories or [],
affected_data_subjects=affected_data_subjects,
detected_at=datetime.utcnow(),
occurred_at=occurred_at or datetime.utcnow(),
likely_consequences=likely_consequences,
measures_proposed=measures_proposed,
risk_level=risk_level or 'medium',
reported_by=reported_by,
extra_metadata=extra_metadata
)
db.add(breach)
db.commit()
db.refresh(breach)
# Log breach detection
await audit_service.log_action(
db=db,
action='data_breach_detected',
resource_type='data_breach',
user_id=reported_by,
resource_id=breach.id,
details={
'breach_type': breach_type.value,
'risk_level': risk_level,
'affected_subjects': affected_data_subjects
},
status='warning'
)
logger.warning(f'Data breach detected: {breach.id} - {breach_type.value}')
return breach
@staticmethod
async def report_to_authority(
db: Session,
breach_id: int,
authority_reference: str,
reported_by: int
) -> DataBreach:
"""Report breach to supervisory authority (Article 33)."""
breach = db.query(DataBreach).filter(DataBreach.id == breach_id).first()
if not breach:
raise ValueError('Breach not found')
breach.status = BreachStatus.reported_to_authority
breach.reported_to_authority_at = datetime.utcnow()
breach.authority_reference = authority_reference
db.commit()
db.refresh(breach)
# Check if within deadline
time_since_detection = datetime.utcnow() - breach.detected_at
if time_since_detection > timedelta(hours=BreachService.NOTIFICATION_DEADLINE_HOURS):
logger.warning(f'Breach {breach_id} reported after {BreachService.NOTIFICATION_DEADLINE_HOURS} hour deadline')
# Log report
await audit_service.log_action(
db=db,
action='breach_reported_to_authority',
resource_type='data_breach',
user_id=reported_by,
resource_id=breach_id,
details={'authority_reference': authority_reference},
status='success'
)
logger.info(f'Breach {breach_id} reported to authority: {authority_reference}')
return breach
@staticmethod
async def notify_data_subjects(
db: Session,
breach_id: int,
notification_method: str,
notified_by: int
) -> DataBreach:
"""Notify affected data subjects (Article 34)."""
breach = db.query(DataBreach).filter(DataBreach.id == breach_id).first()
if not breach:
raise ValueError('Breach not found')
breach.status = BreachStatus.notified_data_subjects
breach.notified_data_subjects_at = datetime.utcnow()
breach.notification_method = notification_method
db.commit()
db.refresh(breach)
# Log notification
await audit_service.log_action(
db=db,
action='breach_subjects_notified',
resource_type='data_breach',
user_id=notified_by,
resource_id=breach_id,
details={'notification_method': notification_method},
status='success'
)
logger.info(f'Data subjects notified for breach {breach_id}')
return breach
@staticmethod
def get_breaches(
db: Session,
status: Optional[BreachStatus] = None,
limit: int = 50,
offset: int = 0
) -> List[DataBreach]:
"""Get data breaches with optional filters."""
query = db.query(DataBreach)
if status:
query = query.filter(DataBreach.status == status)
return query.order_by(DataBreach.detected_at.desc()).offset(offset).limit(limit).all()
@staticmethod
def get_breaches_requiring_notification(
db: Session
) -> List[DataBreach]:
"""Get breaches that require notification (not yet reported)."""
deadline = datetime.utcnow() - timedelta(hours=BreachService.NOTIFICATION_DEADLINE_HOURS)
return db.query(DataBreach).filter(
DataBreach.status.in_([BreachStatus.detected, BreachStatus.investigating]),
DataBreach.detected_at < deadline
).all()
breach_service = BreachService()
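
# Illustrative usage sketch (not a definitive workflow): a periodic task that
# flags breaches still unreported after the Article 33 deadline. `SessionLocal`
# is an assumed session factory from the shared database config; adapt the
# import to the actual project layout.
def escalate_overdue_breaches() -> None:
    from ...shared.config.database import SessionLocal  # assumed factory name
    db = SessionLocal()
    try:
        overdue = breach_service.get_breaches_requiring_notification(db)
        for breach in overdue:
            logger.warning(
                f'Breach {breach.id} ({breach.breach_type.value}) has exceeded the '
                f'{BreachService.NOTIFICATION_DEADLINE_HOURS}h reporting window'
            )
    finally:
        db.close()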

View File

@@ -0,0 +1,202 @@
"""
GDPR Consent Management Service.
"""
from sqlalchemy.orm import Session
from typing import Dict, Any, Optional, List
from datetime import datetime, timedelta
from ..models.consent import Consent, ConsentType, ConsentStatus
from ...auth.models.user import User
from ...shared.config.logging_config import get_logger
from ...analytics.services.audit_service import audit_service
logger = get_logger(__name__)
class ConsentService:
"""Service for managing user consent (Article 7 GDPR)."""
@staticmethod
async def grant_consent(
db: Session,
user_id: int,
consent_type: ConsentType,
legal_basis: str,
consent_method: str = 'explicit',
consent_version: Optional[str] = None,
expires_at: Optional[datetime] = None,
ip_address: Optional[str] = None,
user_agent: Optional[str] = None,
source: Optional[str] = None,
extra_metadata: Optional[Dict[str, Any]] = None
) -> Consent:
"""Grant consent for a specific purpose."""
# Withdraw any existing consent of this type
existing = db.query(Consent).filter(
Consent.user_id == user_id,
Consent.consent_type == consent_type,
Consent.status == ConsentStatus.granted
).first()
if existing:
existing.status = ConsentStatus.withdrawn
existing.withdrawn_at = datetime.utcnow()
# Create new consent
consent = Consent(
user_id=user_id,
consent_type=consent_type,
status=ConsentStatus.granted,
granted_at=datetime.utcnow(),
expires_at=expires_at,
legal_basis=legal_basis,
consent_method=consent_method,
consent_version=consent_version,
ip_address=ip_address,
user_agent=user_agent,
source=source,
extra_metadata=extra_metadata
)
db.add(consent)
db.commit()
db.refresh(consent)
# Log consent grant
await audit_service.log_action(
db=db,
action='consent_granted',
resource_type='consent',
user_id=user_id,
resource_id=consent.id,
ip_address=ip_address,
user_agent=user_agent,
details={
'consent_type': consent_type.value,
'legal_basis': legal_basis,
'consent_method': consent_method
},
status='success'
)
logger.info(f'Consent granted: {consent_type.value} for user {user_id}')
return consent
@staticmethod
async def withdraw_consent(
db: Session,
user_id: int,
consent_type: ConsentType,
ip_address: Optional[str] = None,
user_agent: Optional[str] = None
) -> Consent:
"""Withdraw consent (Article 7(3) GDPR)."""
consent = db.query(Consent).filter(
Consent.user_id == user_id,
Consent.consent_type == consent_type,
Consent.status == ConsentStatus.granted
).order_by(Consent.granted_at.desc()).first()
if not consent:
raise ValueError(f'No active consent found for {consent_type.value}')
consent.status = ConsentStatus.withdrawn
consent.withdrawn_at = datetime.utcnow()
db.commit()
db.refresh(consent)
# Log consent withdrawal
await audit_service.log_action(
db=db,
action='consent_withdrawn',
resource_type='consent',
user_id=user_id,
resource_id=consent.id,
ip_address=ip_address,
user_agent=user_agent,
details={'consent_type': consent_type.value},
status='success'
)
logger.info(f'Consent withdrawn: {consent_type.value} for user {user_id}')
return consent
@staticmethod
def get_user_consents(
db: Session,
user_id: int,
include_withdrawn: bool = False
) -> List[Consent]:
"""Get all consents for a user."""
query = db.query(Consent).filter(Consent.user_id == user_id)
if not include_withdrawn:
query = query.filter(Consent.status == ConsentStatus.granted)
return query.order_by(Consent.granted_at.desc()).all()
@staticmethod
def has_consent(
db: Session,
user_id: int,
consent_type: ConsentType
) -> bool:
"""Check if user has active consent for a specific type."""
consent = db.query(Consent).filter(
Consent.user_id == user_id,
Consent.consent_type == consent_type,
Consent.status == ConsentStatus.granted
).first()
if not consent:
return False
# Check if expired
if consent.expires_at and consent.expires_at < datetime.utcnow():
consent.status = ConsentStatus.expired
db.commit()
return False
return True
@staticmethod
async def update_consent_preferences(
db: Session,
user_id: int,
consents: Dict[ConsentType, bool],
legal_basis: str = 'consent',
ip_address: Optional[str] = None,
user_agent: Optional[str] = None,
source: Optional[str] = None
) -> List[Consent]:
"""Update multiple consent preferences at once."""
results = []
for consent_type, granted in consents.items():
if granted:
consent = await ConsentService.grant_consent(
db=db,
user_id=user_id,
consent_type=consent_type,
legal_basis=legal_basis,
ip_address=ip_address,
user_agent=user_agent,
source=source
)
results.append(consent)
else:
try:
consent = await ConsentService.withdraw_consent(
db=db,
user_id=user_id,
consent_type=consent_type,
ip_address=ip_address,
user_agent=user_agent
)
results.append(consent)
except ValueError:
# No active consent to withdraw
pass
return results
consent_service = ConsentService()
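
# Illustrative usage sketch: granting marketing consent from a request handler
# and gating a mailing on it. The helper name and the `marketing` enum member
# are assumptions, not guaranteed by this module.
async def example_grant_marketing_consent(db: Session, user: User, ip: str, ua: str) -> None:
    await consent_service.grant_consent(
        db=db,
        user_id=user.id,
        consent_type=ConsentType.marketing,  # assumes a `marketing` member exists
        legal_basis='consent',
        consent_method='explicit',
        ip_address=ip,
        user_agent=ua,
        source='account_settings',
    )
    if consent_service.has_consent(db, user.id, ConsentType.marketing):
        logger.info(f'User {user.id} can receive marketing email')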

View File

@@ -0,0 +1,128 @@
"""
Data Processing Records Service (Article 30 GDPR).
"""
from sqlalchemy.orm import Session
from typing import Dict, Any, Optional, List
from datetime import datetime
from ..models.data_processing_record import DataProcessingRecord, ProcessingCategory, LegalBasis
from ...shared.config.logging_config import get_logger
logger = get_logger(__name__)
class DataProcessingService:
"""Service for maintaining data processing records (Article 30 GDPR)."""
@staticmethod
async def create_processing_record(
db: Session,
processing_category: ProcessingCategory,
legal_basis: LegalBasis,
purpose: str,
data_categories: Optional[List[str]] = None,
data_subjects: Optional[List[str]] = None,
recipients: Optional[List[str]] = None,
third_parties: Optional[List[str]] = None,
transfers_to_third_countries: bool = False,
transfer_countries: Optional[List[str]] = None,
safeguards: Optional[str] = None,
retention_period: Optional[str] = None,
retention_criteria: Optional[str] = None,
security_measures: Optional[str] = None,
user_id: Optional[int] = None,
related_booking_id: Optional[int] = None,
related_payment_id: Optional[int] = None,
processed_by: Optional[int] = None,
extra_metadata: Optional[Dict[str, Any]] = None
) -> DataProcessingRecord:
"""Create a data processing record."""
record = DataProcessingRecord(
processing_category=processing_category,
legal_basis=legal_basis,
purpose=purpose,
data_categories=data_categories or [],
data_subjects=data_subjects or [],
recipients=recipients or [],
third_parties=third_parties or [],
transfers_to_third_countries=transfers_to_third_countries,
transfer_countries=transfer_countries or [],
safeguards=safeguards,
retention_period=retention_period,
retention_criteria=retention_criteria,
security_measures=security_measures,
user_id=user_id,
related_booking_id=related_booking_id,
related_payment_id=related_payment_id,
processed_by=processed_by,
processing_timestamp=datetime.utcnow(),
extra_metadata=extra_metadata
)
db.add(record)
db.commit()
db.refresh(record)
logger.info(f'Data processing record created: {record.id}')
return record
@staticmethod
def get_processing_records(
db: Session,
user_id: Optional[int] = None,
processing_category: Optional[ProcessingCategory] = None,
legal_basis: Optional[LegalBasis] = None,
limit: int = 100,
offset: int = 0
) -> List[DataProcessingRecord]:
"""Get data processing records with optional filters."""
query = db.query(DataProcessingRecord)
if user_id:
query = query.filter(DataProcessingRecord.user_id == user_id)
if processing_category:
query = query.filter(DataProcessingRecord.processing_category == processing_category)
if legal_basis:
query = query.filter(DataProcessingRecord.legal_basis == legal_basis)
return query.order_by(DataProcessingRecord.processing_timestamp.desc()).offset(offset).limit(limit).all()
@staticmethod
def get_user_processing_summary(
db: Session,
user_id: int
) -> Dict[str, Any]:
"""Get a summary of all data processing activities for a user."""
records = db.query(DataProcessingRecord).filter(
DataProcessingRecord.user_id == user_id
).all()
summary = {
'total_records': len(records),
'by_category': {},
'by_legal_basis': {},
'third_party_sharing': [],
'transfers_to_third_countries': []
}
for record in records:
# By category
category = record.processing_category.value
summary['by_category'][category] = summary['by_category'].get(category, 0) + 1
# By legal basis
basis = record.legal_basis.value
summary['by_legal_basis'][basis] = summary['by_legal_basis'].get(basis, 0) + 1
# Third party sharing
if record.third_parties:
summary['third_party_sharing'].extend(record.third_parties)
# Transfers
if record.transfers_to_third_countries:
summary['transfers_to_third_countries'].extend(record.transfer_countries or [])
return summary
data_processing_service = DataProcessingService()
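
# Illustrative usage sketch: recording a booking-related processing activity
# and reading back the per-user summary served by the /processing-records
# route. The enum members `booking_management` and `contract` are assumptions
# about ProcessingCategory/LegalBasis.
async def example_record_booking_processing(db: Session, user_id: int, booking_id: int) -> Dict[str, Any]:
    await data_processing_service.create_processing_record(
        db=db,
        processing_category=ProcessingCategory.booking_management,  # assumed member
        legal_basis=LegalBasis.contract,                            # assumed member
        purpose='Process and manage a hotel room booking',
        data_categories=['contact_details', 'payment_reference'],
        user_id=user_id,
        related_booking_id=booking_id,
        retention_period='5 years',
    )
    return data_processing_service.get_user_processing_summary(db=db, user_id=user_id)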

File diff suppressed because it is too large

View File

@@ -0,0 +1,141 @@
"""
Data Retention Service for GDPR compliance.
"""
from sqlalchemy.orm import Session
from typing import Dict, Any, Optional, List
from datetime import datetime, timedelta
from ..models.data_retention import RetentionRule, DataRetentionLog
from ...shared.config.logging_config import get_logger
from ...analytics.services.audit_service import audit_service
logger = get_logger(__name__)
class RetentionService:
"""Service for managing data retention policies and cleanup."""
@staticmethod
def create_retention_rule(
db: Session,
data_category: str,
retention_period_days: int,
retention_period_months: Optional[int] = None,
retention_period_years: Optional[int] = None,
legal_basis: Optional[str] = None,
legal_requirement: Optional[str] = None,
action_after_retention: str = 'anonymize',
conditions: Optional[Dict[str, Any]] = None,
description: Optional[str] = None,
created_by: Optional[int] = None
) -> RetentionRule:
"""Create a data retention rule."""
rule = RetentionRule(
data_category=data_category,
retention_period_days=retention_period_days,
retention_period_months=retention_period_months,
retention_period_years=retention_period_years,
legal_basis=legal_basis,
legal_requirement=legal_requirement,
action_after_retention=action_after_retention,
conditions=conditions,
description=description,
created_by=created_by,
is_active=True
)
db.add(rule)
db.commit()
db.refresh(rule)
logger.info(f'Retention rule created: {data_category} - {retention_period_days} days')
return rule
@staticmethod
def get_retention_rules(
db: Session,
is_active: Optional[bool] = None
) -> List[RetentionRule]:
"""Get retention rules."""
query = db.query(RetentionRule)
if is_active is not None:
query = query.filter(RetentionRule.is_active == is_active)
return query.order_by(RetentionRule.data_category).all()
@staticmethod
def get_retention_rule(
db: Session,
data_category: str
) -> Optional[RetentionRule]:
"""Get retention rule for a specific data category."""
return db.query(RetentionRule).filter(
RetentionRule.data_category == data_category,
RetentionRule.is_active == True
).first()
@staticmethod
async def log_retention_action(
db: Session,
retention_rule_id: int,
data_category: str,
action_taken: str,
records_affected: int,
affected_ids: Optional[List[int]] = None,
executed_by: Optional[int] = None,
success: bool = True,
error_message: Optional[str] = None,
extra_metadata: Optional[Dict[str, Any]] = None
) -> DataRetentionLog:
"""Log a data retention action."""
log = DataRetentionLog(
retention_rule_id=retention_rule_id,
data_category=data_category,
action_taken=action_taken,
records_affected=records_affected,
affected_ids=affected_ids or [],
executed_by=executed_by,
executed_at=datetime.utcnow(),
success=success,
error_message=error_message,
extra_metadata=extra_metadata
)
db.add(log)
db.commit()
db.refresh(log)
# Log to audit trail
await audit_service.log_action(
db=db,
action='data_retention_action',
resource_type='retention_log',
user_id=executed_by,
resource_id=log.id,
details={
'data_category': data_category,
'action_taken': action_taken,
'records_affected': records_affected
},
status='success' if success else 'error'
)
logger.info(f'Retention action logged: {action_taken} on {data_category} - {records_affected} records')
return log
@staticmethod
def get_retention_logs(
db: Session,
data_category: Optional[str] = None,
limit: int = 100,
offset: int = 0
) -> List[DataRetentionLog]:
"""Get retention action logs."""
query = db.query(DataRetentionLog)
if data_category:
query = query.filter(DataRetentionLog.data_category == data_category)
return query.order_by(DataRetentionLog.executed_at.desc()).offset(offset).limit(limit).all()
retention_service = RetentionService()
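
# Illustrative usage sketch: ensuring a retention rule exists for audit logs
# and logging the outcome of a cleanup run. The rule values are examples only,
# not a recommended policy.
async def example_audit_log_retention(db: Session, admin_id: int) -> DataRetentionLog:
    rule = retention_service.get_retention_rule(db, 'audit_logs')
    if rule is None:
        rule = retention_service.create_retention_rule(
            db=db,
            data_category='audit_logs',
            retention_period_days=365,
            legal_basis='legitimate_interest',
            action_after_retention='delete',
            description='Keep audit logs for one year, then delete',
            created_by=admin_id,
        )
    # ... actual deletion/anonymization of expired rows would run here ...
    return await retention_service.log_retention_action(
        db=db,
        retention_rule_id=rule.id,
        data_category='audit_logs',
        action_taken='delete',
        records_affected=0,
        executed_by=admin_id,
    )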

View File

@@ -2,7 +2,7 @@ from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session, joinedload
from typing import Optional
from datetime import datetime
import random
import secrets
from ...shared.config.database import get_db
from ...shared.config.logging_config import get_logger
@@ -33,7 +33,8 @@ router = APIRouter(prefix="/service-bookings", tags=["service-bookings"])
def generate_service_booking_number() -> str:
prefix = "SB"
timestamp = datetime.utcnow().strftime("%Y%m%d")
random_suffix = random.randint(1000, 9999)
# Use cryptographically secure random number to prevent enumeration attacks
random_suffix = secrets.randbelow(9000) + 1000 # Random number between 1000-9999
return f"{prefix}{timestamp}{random_suffix}"
@router.post("/")

View File

@@ -1,7 +1,7 @@
from sqlalchemy.orm import Session
from datetime import datetime, timedelta, date
from typing import Optional
import random
import secrets
import string
from ..models.user_loyalty import UserLoyalty
from ..models.loyalty_tier import LoyaltyTier, TierLevel
@@ -78,19 +78,23 @@ class LoyaltyService:
@staticmethod
def generate_referral_code(db: Session, user_id: int, length: int = 8) -> str:
"""Generate unique referral code for user"""
"""Generate unique referral code for user using cryptographically secure random"""
max_attempts = 10
alphabet = string.ascii_uppercase + string.digits
for _ in range(max_attempts):
# Generate code: USER1234 format
code = f"USER{user_id:04d}{''.join(random.choices(string.ascii_uppercase + string.digits, k=length-8))}"
# Generate code: USER1234 format using cryptographically secure random
# Use secrets.choice() instead of random.choices() for security
random_part = ''.join(secrets.choice(alphabet) for _ in range(length-8))
code = f"USER{user_id:04d}{random_part}"
# Check if code exists
existing = db.query(UserLoyalty).filter(UserLoyalty.referral_code == code).first()
if not existing:
return code
# Fallback: timestamp-based
return f"REF{int(datetime.utcnow().timestamp())}{user_id}"
# Fallback: timestamp-based with secure random suffix
random_suffix = ''.join(secrets.choice(alphabet) for _ in range(4))
return f"REF{int(datetime.utcnow().timestamp())}{user_id}{random_suffix}"
@staticmethod
def create_default_tiers(db: Session):
@@ -340,14 +344,18 @@ class LoyaltyService:
@staticmethod
def generate_redemption_code(db: Session, length: int = 12) -> str:
"""Generate unique redemption code"""
"""Generate unique redemption code using cryptographically secure random"""
max_attempts = 10
alphabet = string.ascii_uppercase + string.digits
for _ in range(max_attempts):
code = ''.join(random.choices(string.ascii_uppercase + string.digits, k=length))
# Use secrets.choice() instead of random.choices() for security
code = ''.join(secrets.choice(alphabet) for _ in range(length))
existing = db.query(RewardRedemption).filter(RewardRedemption.code == code).first()
if not existing:
return code
return f"RED{int(datetime.utcnow().timestamp())}"
# Fallback with secure random suffix
random_suffix = ''.join(secrets.choice(alphabet) for _ in range(4))
return f"RED{int(datetime.utcnow().timestamp())}{random_suffix}"
@staticmethod
def process_referral(

View File

@@ -95,10 +95,16 @@ else:
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'Allowed CORS origins: {", ".join(settings.CORS_ORIGINS)}')
app.add_middleware(CORSMiddleware, allow_origins=settings.CORS_ORIGINS or [], allow_credentials=True, allow_methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'], allow_headers=['*'])
# SECURITY: Use explicit headers instead of wildcard to prevent header injection
app.add_middleware(
CORSMiddleware,
allow_origins=settings.CORS_ORIGINS or [],
allow_credentials=True,
allow_methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'],
allow_headers=['Content-Type', 'Authorization', 'X-XSRF-TOKEN', 'X-Requested-With', 'X-Request-ID', 'Accept', 'Accept-Language']
)
uploads_dir = Path(__file__).parent.parent / settings.UPLOAD_DIR
uploads_dir.mkdir(exist_ok=True)
app.mount('/uploads', StaticFiles(directory=str(uploads_dir)), name='uploads')
app.add_exception_handler(HTTPException, http_exception_handler)
app.add_exception_handler(RequestValidationError, validation_exception_handler)
app.add_exception_handler(IntegrityError, integrity_error_handler)
@@ -108,18 +114,18 @@ app.add_exception_handler(Exception, general_exception_handler)
@app.get('/health', tags=['health'])
@app.get('/api/health', tags=['health'])
async def health_check(db: Session=Depends(get_db)):
"""Comprehensive health check endpoint"""
"""
Public health check endpoint.
Returns minimal information for security - no sensitive details exposed.
"""
health_status = {
'status': 'healthy',
'timestamp': datetime.utcnow().isoformat(),
'service': settings.APP_NAME,
'version': settings.APP_VERSION,
'environment': settings.ENVIRONMENT,
# SECURITY: Don't expose service name, version, or environment in public endpoint
'checks': {
'api': 'ok',
'database': 'unknown',
'disk_space': 'unknown',
'memory': 'unknown'
'database': 'unknown'
# SECURITY: Don't expose disk_space or memory details publicly
}
}
@@ -131,60 +137,26 @@ async def health_check(db: Session=Depends(get_db)):
except OperationalError as e:
health_status['status'] = 'unhealthy'
health_status['checks']['database'] = 'error'
health_status['error'] = str(e)
# SECURITY: Don't expose database error details publicly
logger.error(f'Database health check failed: {str(e)}')
# Remove error details from response
return JSONResponse(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, content=health_status)
except Exception as e:
health_status['status'] = 'unhealthy'
health_status['checks']['database'] = 'error'
health_status['error'] = str(e)
# SECURITY: Don't expose error details publicly
logger.error(f'Health check failed: {str(e)}')
# Remove error details from response
return JSONResponse(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, content=health_status)
# Check disk space (if available)
try:
import shutil
disk = shutil.disk_usage('/')
free_percent = (disk.free / disk.total) * 100
if free_percent < 10:
health_status['checks']['disk_space'] = 'warning'
health_status['status'] = 'degraded'
else:
health_status['checks']['disk_space'] = 'ok'
health_status['disk_space'] = {
'free_gb': round(disk.free / (1024**3), 2),
'total_gb': round(disk.total / (1024**3), 2),
'free_percent': round(free_percent, 2)
}
except Exception:
health_status['checks']['disk_space'] = 'unknown'
# Check memory (if available)
try:
import psutil
memory = psutil.virtual_memory()
if memory.percent > 90:
health_status['checks']['memory'] = 'warning'
if health_status['status'] == 'healthy':
health_status['status'] = 'degraded'
else:
health_status['checks']['memory'] = 'ok'
health_status['memory'] = {
'used_percent': round(memory.percent, 2),
'available_gb': round(memory.available / (1024**3), 2),
'total_gb': round(memory.total / (1024**3), 2)
}
except ImportError:
# psutil not available, skip memory check
health_status['checks']['memory'] = 'unavailable'
except Exception:
health_status['checks']['memory'] = 'unknown'
# SECURITY: Disk space and memory checks removed from public endpoint
# These details should only be available on internal/admin health endpoint
# Determine overall status
if health_status['status'] == 'healthy' and any(
check == 'warning' for check in health_status['checks'].values()
check == 'error' for check in health_status['checks'].values()
):
health_status['status'] = 'degraded'
health_status['status'] = 'unhealthy'
status_code = status.HTTP_200_OK
if health_status['status'] == 'unhealthy':
@@ -195,8 +167,110 @@ async def health_check(db: Session=Depends(get_db)):
return JSONResponse(status_code=status_code, content=health_status)
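# Illustrative sketch of an internal, admin-only health endpoint that keeps the
# disk/memory details removed from the public endpoint above. The path and the
# module-level import of authorize_roles are assumptions; adjust to the project layout.
@app.get('/api/internal/health', tags=['health'], include_in_schema=False)
async def internal_health_check(
    current_user=Depends(authorize_roles('admin', 'staff'))  # assumed module-level import
):
    import shutil
    checks = {'timestamp': datetime.utcnow().isoformat()}
    disk = shutil.disk_usage('/')
    checks['disk_space'] = {
        'free_gb': round(disk.free / (1024 ** 3), 2),
        'total_gb': round(disk.total / (1024 ** 3), 2),
        'free_percent': round((disk.free / disk.total) * 100, 2),
    }
    try:
        import psutil  # optional dependency; skip memory details if missing
        memory = psutil.virtual_memory()
        checks['memory'] = {
            'used_percent': round(memory.percent, 2),
            'available_gb': round(memory.available / (1024 ** 3), 2),
        }
    except ImportError:
        checks['memory'] = 'unavailable'
    return checks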
@app.get('/metrics', tags=['monitoring'])
async def metrics():
return {'status': 'success', 'service': settings.APP_NAME, 'version': settings.APP_VERSION, 'environment': settings.ENVIRONMENT, 'timestamp': datetime.utcnow().isoformat()}
async def metrics(
    current_user=Depends(authorize_roles('admin', 'staff'))
):
    """
    Protected metrics endpoint - requires admin or staff authentication.
    SECURITY: Prevents information disclosure to unauthorized users.
    Note: authorize_roles must be imported at module level
    (from .security.middleware.auth import authorize_roles) so FastAPI can
    resolve it as a dependency; calling it manually inside the handler would
    bypass dependency injection and never perform the check.
    """
    return {
        'status': 'success',
        'service': settings.APP_NAME,
        'version': settings.APP_VERSION,
        'environment': settings.ENVIRONMENT,
        'timestamp': datetime.utcnow().isoformat()
    }
# Custom route for serving uploads with CORS headers
# This route takes precedence over the mount below
from fastapi.responses import FileResponse
import re
@app.options('/uploads/{file_path:path}')
async def serve_upload_file_options(file_path: str, request: Request):
"""Handle CORS preflight for upload files."""
origin = request.headers.get('origin')
if origin:
if settings.is_development:
if re.match(r'http://(localhost|127\.0\.0\.1)(:\d+)?', origin):
return JSONResponse(
content={},
headers={
'Access-Control-Allow-Origin': origin,
'Access-Control-Allow-Credentials': 'true',
'Access-Control-Allow-Methods': 'GET, HEAD, OPTIONS',
'Access-Control-Allow-Headers': '*',
'Access-Control-Max-Age': '3600'
}
)
elif origin in (settings.CORS_ORIGINS or []):
return JSONResponse(
content={},
headers={
'Access-Control-Allow-Origin': origin,
'Access-Control-Allow-Credentials': 'true',
'Access-Control-Allow-Methods': 'GET, HEAD, OPTIONS',
'Access-Control-Allow-Headers': '*',
'Access-Control-Max-Age': '3600'
}
)
return JSONResponse(content={})
@app.get('/uploads/{file_path:path}')
@app.head('/uploads/{file_path:path}')
async def serve_upload_file(file_path: str, request: Request):
"""Serve uploaded files with proper CORS headers."""
file_location = uploads_dir / file_path
# Security: Prevent directory traversal
    try:
        resolved_path = file_location.resolve()
        resolved_uploads = uploads_dir.resolve()
        # Compare resolved paths with relative_to() so that sibling directories
        # (e.g. uploads-static) cannot slip past a plain string-prefix check
        resolved_path.relative_to(resolved_uploads)
    except ValueError:
        raise HTTPException(status_code=403, detail="Access denied")
    except OSError:
        raise HTTPException(status_code=404, detail="File not found")
if not file_location.exists() or not file_location.is_file():
raise HTTPException(status_code=404, detail="File not found")
# Get origin from request
origin = request.headers.get('origin')
# Prepare response
response = FileResponse(str(file_location))
# Add CORS headers if origin matches
if origin:
if settings.is_development:
if re.match(r'http://(localhost|127\.0\.0\.1)(:\d+)?', origin):
response.headers['Access-Control-Allow-Origin'] = origin
response.headers['Access-Control-Allow-Credentials'] = 'true'
response.headers['Access-Control-Allow-Methods'] = 'GET, HEAD, OPTIONS'
response.headers['Access-Control-Allow-Headers'] = '*'
response.headers['Access-Control-Expose-Headers'] = '*'
elif origin in (settings.CORS_ORIGINS or []):
response.headers['Access-Control-Allow-Origin'] = origin
response.headers['Access-Control-Allow-Credentials'] = 'true'
response.headers['Access-Control-Allow-Methods'] = 'GET, HEAD, OPTIONS'
response.headers['Access-Control-Allow-Headers'] = '*'
response.headers['Access-Control-Expose-Headers'] = '*'
return response
# Mount static files as fallback (routes take precedence)
from starlette.staticfiles import StaticFiles
app.mount('/uploads-static', StaticFiles(directory=str(uploads_dir)), name='uploads-static')
# Import all route modules from feature-based structure
from .auth.routes import auth_routes, user_routes
from .rooms.routes import room_routes, advanced_room_routes, rate_plan_routes
@@ -219,6 +293,7 @@ from .security.routes import security_routes, compliance_routes
from .system.routes import system_settings_routes, workflow_routes, task_routes, approval_routes, backup_routes
from .ai.routes import ai_assistant_routes
from .compliance.routes import gdpr_routes
from .compliance.routes.gdpr_admin_routes import router as gdpr_admin_routes
from .integrations.routes import webhook_routes, api_key_routes
from .auth.routes import session_routes
@@ -274,6 +349,7 @@ app.include_router(blog_routes.router, prefix=api_prefix)
app.include_router(ai_assistant_routes.router, prefix=api_prefix)
app.include_router(approval_routes.router, prefix=api_prefix)
app.include_router(gdpr_routes.router, prefix=api_prefix)
app.include_router(gdpr_admin_routes, prefix=api_prefix)
app.include_router(webhook_routes.router, prefix=api_prefix)
app.include_router(api_key_routes.router, prefix=api_prefix)
app.include_router(session_routes.router, prefix=api_prefix)
@@ -281,57 +357,38 @@ app.include_router(backup_routes.router, prefix=api_prefix)
logger.info('All routes registered successfully')
def ensure_jwt_secret():
"""Generate and save JWT secret if it's using the default value.
In production, fail fast if default secret is used for security.
In development, auto-generate a secure secret if needed.
"""
default_secret = 'dev-secret-key-change-in-production-12345'
Validate JWT secret is properly configured.
SECURITY: JWT_SECRET must be explicitly set via environment variable.
No default values are acceptable for security.
"""
current_secret = settings.JWT_SECRET
# Security check: Fail fast in production if using default secret
if settings.is_production and (not current_secret or current_secret == default_secret):
error_msg = (
'CRITICAL SECURITY ERROR: JWT_SECRET is using default value in production! '
'Please set a secure JWT_SECRET in your environment variables.'
)
logger.error(error_msg)
raise ValueError(error_msg)
# Development mode: Auto-generate if needed
if not current_secret or current_secret == default_secret:
new_secret = secrets.token_urlsafe(64)
os.environ['JWT_SECRET'] = new_secret
env_file = Path(__file__).parent.parent / '.env'
if env_file.exists():
try:
env_content = env_file.read_text(encoding='utf-8')
jwt_pattern = re.compile(r'^JWT_SECRET=.*$', re.MULTILINE)
if jwt_pattern.search(env_content):
env_content = jwt_pattern.sub(f'JWT_SECRET={new_secret}', env_content)
else:
jwt_section_pattern = re.compile(r'(# =+.*JWT.*=+.*\n)', re.IGNORECASE | re.MULTILINE)
match = jwt_section_pattern.search(env_content)
if match:
insert_pos = match.end()
env_content = env_content[:insert_pos] + f'JWT_SECRET={new_secret}\n' + env_content[insert_pos:]
else:
env_content += f'\nJWT_SECRET={new_secret}\n'
env_file.write_text(env_content, encoding='utf-8')
logger.info('✓ JWT secret generated and saved to .env file')
except Exception as e:
logger.warning(f'Could not update .env file: {e}')
logger.info(f'Generated JWT secret (add to .env manually): JWT_SECRET={new_secret}')
# SECURITY: JWT_SECRET validation is now handled in settings.py
# This function is kept for backward compatibility and logging
if not current_secret or current_secret.strip() == '':
if settings.is_production:
# This should not happen as settings validation should catch it
error_msg = (
'CRITICAL SECURITY ERROR: JWT_SECRET is not configured. '
'Please set JWT_SECRET environment variable before starting the application.'
)
logger.error(error_msg)
raise ValueError(error_msg)
else:
logger.info(f'Generated JWT secret (add to .env file): JWT_SECRET={new_secret}')
logger.info('✓ Secure JWT secret generated automatically')
logger.warning(
'JWT_SECRET is not configured. Authentication will fail. '
'Set JWT_SECRET environment variable before starting the application.'
)
else:
# Validate secret strength
if len(current_secret) < 64:
if settings.is_production:
logger.warning(
f'JWT_SECRET is only {len(current_secret)} characters. '
'Recommend using at least 64 characters for production security.'
)
logger.info('✓ JWT secret is configured')
@app.on_event('startup')
@@ -375,7 +432,34 @@ async def shutdown_event():
logger.info(f'{settings.APP_NAME} shutting down gracefully')
if __name__ == '__main__':
import uvicorn
import os
import signal
import sys
from pathlib import Path
def signal_handler(sig, frame):
"""Handle Ctrl+C gracefully."""
logger.info('\nReceived interrupt signal (Ctrl+C). Shutting down gracefully...')
sys.exit(0)
# Register signal handler for graceful shutdown on Ctrl+C
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
base_dir = Path(__file__).parent.parent
src_dir = str(base_dir / 'src')
uvicorn.run('src.main:app', host=settings.HOST, port=settings.PORT, reload=settings.is_development, log_level=settings.LOG_LEVEL.lower(), reload_dirs=[src_dir] if settings.is_development else None, reload_excludes=['*.log', '*.pyc', '*.pyo', '*.pyd', '__pycache__', '**/__pycache__/**', '*.db', '*.sqlite', '*.sqlite3'], reload_delay=0.5)
# Enable hot reload in development mode or if explicitly enabled via environment variable
use_reload = settings.is_development or os.getenv('ENABLE_RELOAD', 'false').lower() == 'true'
if use_reload:
logger.info('Hot reload enabled - server will restart on code changes')
logger.info('Press Ctrl+C to stop the server')
uvicorn.run(
'src.main:app',
host=settings.HOST,
port=settings.PORT,
reload=use_reload,
log_level=settings.LOG_LEVEL.lower(),
reload_dirs=[src_dir] if use_reload else None,
reload_excludes=['*.log', '*.pyc', '*.pyo', '*.pyd', '__pycache__', '**/__pycache__/**', '*.db', '*.sqlite', '*.sqlite3', 'venv/**', '.venv/**'],
reload_delay=0.5
)

View File

@@ -174,10 +174,13 @@ class BoricaService:
backend=default_backend()
)
# NOTE: SHA1 is required by Borica payment gateway protocol
# This is a known security trade-off required for payment gateway compatibility
# Monitor for Borica protocol updates that support stronger algorithms
signature = private_key.sign(
data.encode('utf-8'),
padding.PKCS1v15(),
hashes.SHA1()
hashes.SHA1() # nosec B303 # Required by Borica protocol - acceptable risk
)
return base64.b64encode(signature).decode('utf-8')
except Exception as e:
@@ -228,11 +231,13 @@ class BoricaService:
public_key = cert.public_key()
signature_bytes = base64.b64decode(signature)
# NOTE: SHA1 is required by Borica payment gateway protocol
# This is a known security trade-off required for payment gateway compatibility
public_key.verify(
signature_bytes,
signature_data.encode('utf-8'),
padding.PKCS1v15(),
hashes.SHA1()
hashes.SHA1() # nosec B303 # Required by Borica protocol - acceptable risk
)
return True
except Exception as e:

View File

@@ -10,7 +10,12 @@ class SecurityHeadersMiddleware(BaseHTTPMiddleware):
async def dispatch(self, request: Request, call_next):
response = await call_next(request)
security_headers = {'X-Content-Type-Options': 'nosniff', 'X-Frame-Options': 'DENY', 'X-XSS-Protection': '1; mode=block', 'Referrer-Policy': 'strict-origin-when-cross-origin', 'Permissions-Policy': 'geolocation=(), microphone=(), camera=()'}
security_headers.setdefault('Cross-Origin-Resource-Policy', 'cross-origin')
# Allow cross-origin resource sharing for uploads/images
# This is needed for images to load from different origins in development
if '/uploads/' in str(request.url):
security_headers.setdefault('Cross-Origin-Resource-Policy', 'cross-origin')
else:
security_headers.setdefault('Cross-Origin-Resource-Policy', 'same-origin')
if settings.is_production:
# Enhanced CSP with stricter directives
# Using 'strict-dynamic' for better security with nonce-based scripts

View File

@@ -10,14 +10,14 @@ class Settings(BaseSettings):
ENVIRONMENT: str = Field(default='development', description='Environment: development, staging, production')
DEBUG: bool = Field(default=False, description='Debug mode')
API_V1_PREFIX: str = Field(default='/api/v1', description='API v1 prefix')
HOST: str = Field(default='0.0.0.0', description='Server host')
HOST: str = Field(default='0.0.0.0', description='Server host. WARNING: 0.0.0.0 binds to all interfaces. Use 127.0.0.1 for development or specific IP for production.') # nosec B104 # Acceptable default with validation warning in production
PORT: int = Field(default=8000, description='Server port')
DB_USER: str = Field(default='root', description='Database user')
DB_PASS: str = Field(default='', description='Database password')
DB_NAME: str = Field(default='hotel_db', description='Database name')
DB_HOST: str = Field(default='localhost', description='Database host')
DB_PORT: str = Field(default='3306', description='Database port')
JWT_SECRET: str = Field(default='dev-secret-key-change-in-production-12345', description='JWT secret key')
JWT_SECRET: str = Field(default='', description='JWT secret key - MUST be set via environment variable. Minimum 64 characters recommended for production.')
JWT_ALGORITHM: str = Field(default='HS256', description='JWT algorithm')
JWT_ACCESS_TOKEN_EXPIRE_MINUTES: int = Field(default=30, description='JWT access token expiration in minutes')
JWT_REFRESH_TOKEN_EXPIRE_DAYS: int = Field(default=3, description='JWT refresh token expiration in days (reduced from 7 for better security)')
@@ -97,6 +97,20 @@ class Settings(BaseSettings):
IP_WHITELIST_ENABLED: bool = Field(default=False, description='Enable IP whitelisting for admin endpoints')
ADMIN_IP_WHITELIST: List[str] = Field(default_factory=list, description='List of allowed IP addresses/CIDR ranges for admin endpoints')
def validate_host_configuration(self) -> None:
"""
Validate HOST configuration for security.
Warns if binding to all interfaces (0.0.0.0) in production.
"""
if self.HOST == '0.0.0.0' and self.is_production:
import logging
logger = logging.getLogger(__name__)
logger.warning(
'SECURITY WARNING: HOST is set to 0.0.0.0 in production. '
'This binds the server to all network interfaces. '
'Consider using a specific IP address or ensure proper firewall rules are in place.'
)
def validate_encryption_key(self) -> None:
"""
Validate encryption key is properly configured.
@@ -138,4 +152,41 @@ class Settings(BaseSettings):
logger = logging.getLogger(__name__)
logger.warning(f'Invalid ENCRYPTION_KEY format: {str(e)}')
settings = Settings()
# Validate JWT_SECRET on startup - fail fast if not configured
def validate_jwt_secret():
"""Validate JWT_SECRET is properly configured. Called on startup."""
if not settings.JWT_SECRET or settings.JWT_SECRET.strip() == '':
error_msg = (
'CRITICAL SECURITY ERROR: JWT_SECRET is not configured. '
'Please set JWT_SECRET environment variable to a secure random string. '
'Minimum 64 characters recommended for production. '
'Generate one using: python -c "import secrets; print(secrets.token_urlsafe(64))"'
)
import logging
logger = logging.getLogger(__name__)
logger.error(error_msg)
if settings.is_production:
raise ValueError(error_msg)
else:
logger.warning(
'JWT_SECRET not configured. This will cause authentication to fail. '
'Set JWT_SECRET environment variable before starting the application.'
)
# Warn if using weak secret (less than 64 characters)
if len(settings.JWT_SECRET) < 64:
import logging
logger = logging.getLogger(__name__)
if settings.is_production:
logger.warning(
f'JWT_SECRET is only {len(settings.JWT_SECRET)} characters. '
'Recommend using at least 64 characters for production security.'
)
else:
logger.debug(f'JWT_SECRET length: {len(settings.JWT_SECRET)} characters')
# Validate on import
validate_jwt_secret()
settings.validate_host_configuration()

View File

@@ -0,0 +1,168 @@
"""
HTML/XSS sanitization utilities using bleach library.
Prevents stored XSS attacks by sanitizing user-generated content.
"""
import bleach
from typing import Optional
# Allowed HTML tags for rich text content
ALLOWED_TAGS = [
'p', 'br', 'strong', 'em', 'u', 'b', 'i', 's', 'strike',
'a', 'ul', 'ol', 'li', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
'blockquote', 'pre', 'code', 'hr', 'div', 'span',
'table', 'thead', 'tbody', 'tr', 'th', 'td',
'img'
]
# Allowed attributes for specific tags
ALLOWED_ATTRIBUTES = {
'a': ['href', 'title', 'target', 'rel'],
'img': ['src', 'alt', 'title', 'width', 'height'],
'div': ['class'],
'span': ['class'],
'p': ['class'],
'table': ['class', 'border'],
'th': ['colspan', 'rowspan'],
'td': ['colspan', 'rowspan']
}
# Allowed URL schemes
ALLOWED_PROTOCOLS = ['http', 'https', 'mailto']
# Allowed inline CSS properties (currently none; unused unless a CSS sanitizer is configured)
ALLOWED_STYLES = []
def sanitize_html(content: Optional[str], strip: bool = False) -> str:
"""
Sanitize HTML content to prevent XSS attacks.
Args:
content: The HTML content to sanitize (can be None)
strip: If True, remove disallowed tags instead of escaping them
Returns:
Sanitized HTML string
"""
if not content:
return ''
if not isinstance(content, str):
content = str(content)
# Sanitize HTML
sanitized = bleach.clean(
content,
tags=ALLOWED_TAGS,
attributes=ALLOWED_ATTRIBUTES,
protocols=ALLOWED_PROTOCOLS,
strip=strip,
strip_comments=True
)
# Linkify URLs (convert plain URLs to links)
# Only linkify if content doesn't already contain HTML links
if '<a' not in sanitized:
        # bleach.linkify() does not accept a `protocols` argument (only clean() does),
        # so only parse_email is passed here
        sanitized = bleach.linkify(sanitized, parse_email=True)
return sanitized
def sanitize_text(content: Optional[str]) -> str:
"""
Strip all HTML tags from content, leaving only plain text.
Useful for fields that should not contain any HTML.
Args:
content: The content to sanitize (can be None)
Returns:
Plain text string with all HTML removed
"""
if not content:
return ''
if not isinstance(content, str):
content = str(content)
# Strip all HTML tags
return bleach.clean(content, tags=[], strip=True)
def sanitize_filename(filename: str) -> str:
"""
Sanitize filename to prevent path traversal and other attacks.
Args:
filename: The original filename
Returns:
Sanitized filename safe for filesystem operations
"""
import os
import secrets
from pathlib import Path
if not filename:
# Generate a random filename if none provided
return f"{secrets.token_urlsafe(16)}.bin"
# Remove path components (prevent directory traversal)
filename = os.path.basename(filename)
# Remove dangerous characters
# Keep only alphanumeric, dots, dashes, and underscores
safe_chars = []
for char in filename:
if char.isalnum() or char in '._-':
safe_chars.append(char)
else:
safe_chars.append('_')
filename = ''.join(safe_chars)
# Limit length (filesystem limit is typically 255)
if len(filename) > 255:
name, ext = os.path.splitext(filename)
max_name_length = 255 - len(ext)
filename = name[:max_name_length] + ext
# Ensure filename is not empty
if not filename or filename == '.' or filename == '..':
filename = f"{secrets.token_urlsafe(16)}.bin"
return filename
def sanitize_url(url: Optional[str]) -> Optional[str]:
"""
Sanitize URL to ensure it uses allowed protocols.
Args:
url: The URL to sanitize
Returns:
Sanitized URL or None if invalid
"""
if not url:
return None
if not isinstance(url, str):
url = str(url)
# Check if URL uses allowed protocol
url_lower = url.lower().strip()
if any(url_lower.startswith(proto + ':') for proto in ALLOWED_PROTOCOLS):
return url
    # Reject URLs that declare any other scheme (e.g. javascript:, data:),
    # even without the "//" part, instead of prefixing them with https
    scheme_candidate = url_lower.split(':', 1)[0]
    if ':' in url_lower and '/' not in scheme_candidate and scheme_candidate.isalpha():
        return None
    # No scheme detected - assume https
    return f'https://{url}'
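
# Example usage (illustrative sketch; inputs are made up, expected behaviour
# follows from the rules above):
if __name__ == '__main__':
    dirty = '<p>Hello</p><script>alert("xss")</script>'
    print(sanitize_html(dirty))      # <p> kept, <script> escaped rather than executed
    print(sanitize_text(dirty))      # all tags removed, only the text content remains
    print(sanitize_filename('../../etc/passwd'))  # 'passwd' - path components dropped
    print(sanitize_url('javascript:alert(1)'))    # None - disallowed scheme
    print(sanitize_url('example.com/page'))       # 'https://example.com/page'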